diff --git a/.gitignore b/.gitignore index 31f2aa5fc66..add7a22e840 100644 --- a/.gitignore +++ b/.gitignore @@ -1,37 +1,42 @@ + +# intellij files .idea/ -.gradle/ *.iml *.ipr *.iws -work/ -/data/ -logs/ -.DS_Store -build/ -generated-resources/ -**/.local* -docs/html/ -docs/build.log -/tmp/ -backwards/ -html_docs -.vagrant/ -## eclipse ignores (use 'mvn eclipse:eclipse' to build eclipse projects) -## All files (.project, .classpath, .settings/*) should be generated through Maven which -## will correctly set the classpath based on the declared dependencies and write settings -## files to ensure common coding style across Eclipse and IDEA. +# eclipse files .project .classpath eclipse-build .settings -## netbeans ignores +# netbeans files nb-configuration.xml nbactions.xml -dependency-reduced-pom.xml +# gradle stuff +.gradle/ +build/ +generated-resources/ -# old patterns specific to maven +# maven stuff (to be removed when trunk becomes 4.x) *-execution-hints.log target/ +dependency-reduced-pom.xml + +# testing stuff +**/.local* +.vagrant/ + +# osx stuff +.DS_Store + +# needed in case docs build is run...maybe we can configure doc build to generate files under build? +html_docs + +# random old stuff that we should look at the necessity of... +/tmp/ +backwards/ + + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 507a27a5912..070ea23d4e0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -84,7 +84,9 @@ Please follow these formatting guidelines: * Line width is 140 characters * The rest is left to Java coding standards * Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. -* Don't worry too much about imports. Try not to change the order but don't worry about fighting your IDE to stop it from switching from * imports to specific imports or from specific to * imports. +* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. Please attempt to tame your IDE so that it doesn't create them, and please send a PR against this document with instructions for your IDE if this document doesn't yet contain them. + * Eclipse: Preferences->Java->Code Style->Organize Imports. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value. +* Don't worry too much about import order. Try not to change it, but don't worry about fighting your IDE to stop it from doing so. To create a distribution from the source, simply run: diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 569c16b0747..fef23d0cd3d 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -290,14 +290,14 @@ The REST tests are run automatically when executing the "gradle check" command.
REST tests use the following command: --------------------------------------------------------------------------- -gradle :distribution:tar:integTest \ +gradle :distribution:integ-test-zip:integTest \ -Dtests.class=org.elasticsearch.test.rest.RestIT --------------------------------------------------------------------------- A specific test case can be run with --------------------------------------------------------------------------- -gradle :distribution:tar:integTest \ +gradle :distribution:integ-test-zip:integTest \ -Dtests.class=org.elasticsearch.test.rest.RestIT \ -Dtests.method="test {p0=cat.shards/10_basic/Help}" --------------------------------------------------------------------------- diff --git a/build.gradle b/build.gradle index 831db456a19..c31fe88f5d2 100644 --- a/build.gradle +++ b/build.gradle @@ -109,7 +109,7 @@ subprojects { ext.projectSubstitutions = [ "org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec', "org.elasticsearch:elasticsearch:${version}": ':core', - "org.elasticsearch:test-framework:${version}": ':test-framework', + "org.elasticsearch.test:framework:${version}": ':test:framework', "org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip', "org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip', "org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar', @@ -131,8 +131,8 @@ subprojects { // the dependency is added. gradle.projectsEvaluated { allprojects { - if (project.path == ':test-framework') { - // :test-framework:test cannot run before and after :core:test + if (project.path == ':test:framework') { + // :test:framework:test cannot run before and after :core:test return } configurations.all { @@ -169,6 +169,30 @@ gradle.projectsEvaluated { // intellij configuration allprojects { apply plugin: 'idea' + + idea { + module { + // same as for the IntelliJ Gradle tooling integration + inheritOutputDirs = false + outputDir = file('build/classes/main') + testOutputDir = file('build/classes/test') + + iml { + // fix so that Gradle idea plugin properly generates support for resource folders + // see also https://issues.gradle.org/browse/GRADLE-2975 + withXml { + it.asNode().component.content.sourceFolder.findAll { it.@url == 'file://$MODULE_DIR$/src/main/resources' }.each { + it.attributes().remove('isTestSource') + it.attributes().put('type', 'java-resource') + } + it.asNode().component.content.sourceFolder.findAll { it.@url == 'file://$MODULE_DIR$/src/test/resources' }.each { + it.attributes().remove('isTestSource') + it.attributes().put('type', 'java-test-resource') + } + } + } + } + } } idea { diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index e46f9cb33c0..a0f06343d30 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -63,6 +63,7 @@ dependencies { compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE.... 
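+ // forbiddenapis is also driven programmatically by the new ThirdPartyAuditTask (via de.thetaphi.forbiddenapis.ant.AntTask), and the rat dependency added below backs the new LicenseHeadersTask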
compile 'de.thetaphi:forbiddenapis:2.0' compile 'com.bmuschko:gradle-nexus-plugin:2.3.1' + compile 'org.apache.rat:apache-rat:0.11' } processResources { diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy index ccb5d5904bf..b28e7210ea4 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy @@ -2,7 +2,6 @@ package com.carrotsearch.gradle.junit4 import com.carrotsearch.ant.tasks.junit4.ListenersList import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener -import com.esotericsoftware.kryo.serializers.FieldSerializer import groovy.xml.NamespaceBuilder import groovy.xml.NamespaceBuilderSupport import org.apache.tools.ant.BuildException @@ -14,7 +13,10 @@ import org.gradle.api.file.FileCollection import org.gradle.api.file.FileTreeElement import org.gradle.api.internal.tasks.options.Option import org.gradle.api.specs.Spec -import org.gradle.api.tasks.* +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.InputDirectory +import org.gradle.api.tasks.Optional +import org.gradle.api.tasks.TaskAction import org.gradle.api.tasks.util.PatternFilterable import org.gradle.api.tasks.util.PatternSet import org.gradle.logging.ProgressLoggerFactory diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy index 5e919ce9ba2..14f5d476be3 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy @@ -27,10 +27,13 @@ import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultE import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener import org.gradle.logging.ProgressLogger import org.gradle.logging.ProgressLoggerFactory -import org.junit.runner.Description -import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.* import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.ERROR +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.FAILURE +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.IGNORED +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.IGNORED_ASSUMPTION +import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.OK import static java.lang.Math.max /** diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy index 0813713353f..450d3645182 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy @@ -5,8 +5,21 @@ import com.carrotsearch.ant.tasks.junit4.Pluralize import com.carrotsearch.ant.tasks.junit4.TestsSummaryEventListener import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.base.Strings import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.eventbus.Subscribe -import com.carrotsearch.ant.tasks.junit4.events.* -import 
com.carrotsearch.ant.tasks.junit4.events.aggregated.* +import com.carrotsearch.ant.tasks.junit4.events.EventType +import com.carrotsearch.ant.tasks.junit4.events.IEvent +import com.carrotsearch.ant.tasks.junit4.events.IStreamEvent +import com.carrotsearch.ant.tasks.junit4.events.SuiteStartedEvent +import com.carrotsearch.ant.tasks.junit4.events.TestFinishedEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedQuitEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedResultEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedStartEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteResultEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteStartedEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.ChildBootstrap +import com.carrotsearch.ant.tasks.junit4.events.aggregated.HeartBeatEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.PartialOutputEvent +import com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus import com.carrotsearch.ant.tasks.junit4.events.mirrors.FailureMirror import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener import com.carrotsearch.ant.tasks.junit4.listeners.StackTraceFilter @@ -15,16 +28,17 @@ import org.gradle.api.logging.LogLevel import org.gradle.api.logging.Logger import org.junit.runner.Description +import javax.sound.sampled.AudioSystem +import javax.sound.sampled.Clip +import javax.sound.sampled.Line +import javax.sound.sampled.LineEvent +import javax.sound.sampled.LineListener import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicInteger -import javax.sound.sampled.AudioSystem; -import javax.sound.sampled.Clip; -import javax.sound.sampled.Line; -import javax.sound.sampled.LineEvent; -import javax.sound.sampled.LineListener; - -import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.* +import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDescription +import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds +import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatTime import static com.carrotsearch.gradle.junit4.TestLoggingConfiguration.OutputMode class TestReportLogger extends TestsSummaryEventListener implements AggregatedEventListener { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy new file mode 100644 index 00000000000..0393e7632bb --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/AntTask.groovy @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle + +import org.apache.tools.ant.BuildListener +import org.apache.tools.ant.BuildLogger +import org.apache.tools.ant.DefaultLogger +import org.apache.tools.ant.Project +import org.gradle.api.DefaultTask +import org.gradle.api.GradleException +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.TaskAction + +import java.nio.charset.Charset + +/** + * A task which will run ant commands. + * + * Logging for the task is customizable for subclasses by overriding makeLogger. + */ +public abstract class AntTask extends DefaultTask { + + /** + * A buffer that will contain the output of the ant code run, + * if the output was not already written directly to stdout. + */ + public final ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream() + + @TaskAction + final void executeTask() { + AntBuilder ant = new AntBuilder() + + // remove existing loggers, we add our own + List<BuildListener> toRemove = new ArrayList<>(); + for (BuildListener listener : ant.project.getBuildListeners()) { + if (listener instanceof BuildLogger) { + toRemove.add(listener); + } + } + for (BuildLogger listener : toRemove) { + ant.project.removeBuildListener(listener) + } + + // otherwise groovy replaces System.out, and you have no chance to debug + // ant.saveStreams = false + + final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO + final PrintStream stream = useStdout() ? System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name()) + BuildLogger antLogger = makeLogger(stream, outputLevel) + + ant.project.addBuildListener(antLogger) + try { + runAnt(ant) + } catch (Exception e) { + // ant failed, so see if we have buffered output to emit, then rethrow the failure + String buffer = outputBuffer.toString() + if (buffer.isEmpty() == false) { + logger.error("=== Ant output ===\n${buffer}") + } + throw e + } + } + + /** Runs the ant commands. Subclasses implement this instead of having to set a closure. */ + protected abstract void runAnt(AntBuilder ant) + + /** Create the logger the ant runner will use, with the given stream for error/output. */ + protected BuildLogger makeLogger(PrintStream stream, int outputLevel) { + return new DefaultLogger( + errorPrintStream: stream, + outputPrintStream: stream, + messageOutputLevel: outputLevel) + } + + /** + * Returns true if the ant logger should write to stdout, or false if to the buffer. + * The default implementation writes to the buffer when gradle info logging is disabled.
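+ * Note that buffered output is still dumped by the failure handler in executeTask if the ant call throws.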
+ */ + protected boolean useStdout() { + return logger.isInfoEnabled() + } + + +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index c4d0ced6b5c..b8fd793ef23 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -18,22 +18,30 @@ */ package org.elasticsearch.gradle -import org.gradle.process.ExecResult - -import java.time.ZonedDateTime -import java.time.ZoneOffset - import nebula.plugin.extraconfigurations.ProvidedBasePlugin import org.elasticsearch.gradle.precommit.PrecommitTasks -import org.gradle.api.* -import org.gradle.api.artifacts.* +import org.gradle.api.GradleException +import org.gradle.api.JavaVersion +import org.gradle.api.Plugin +import org.gradle.api.Project +import org.gradle.api.Task +import org.gradle.api.XmlProvider +import org.gradle.api.artifacts.Configuration +import org.gradle.api.artifacts.ModuleDependency +import org.gradle.api.artifacts.ModuleVersionIdentifier +import org.gradle.api.artifacts.ProjectDependency +import org.gradle.api.artifacts.ResolvedArtifact import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.api.artifacts.maven.MavenPom import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.JavaCompile import org.gradle.internal.jvm.Jvm +import org.gradle.process.ExecResult import org.gradle.util.GradleVersion +import java.time.ZoneOffset +import java.time.ZonedDateTime + /** * Encapsulates build configuration for elasticsearch projects. */ @@ -190,6 +198,10 @@ class BuildPlugin implements Plugin<Project> { * to iterate the transitive dependencies and add excludes. */ static void configureConfigurations(Project project) { + // we are not shipping these jars, we act like dumb consumers of these things + if (project.path.startsWith(':test:fixtures')) { + return + } // fail on any conflicting dependency versions project.configurations.all({ Configuration configuration -> if (configuration.name.startsWith('_transitive_')) { @@ -197,12 +209,16 @@ class BuildPlugin implements Plugin<Project> { // we just have them to find *what* transitive deps exist return } + if (configuration.name.endsWith('Fixture')) { + // just a self contained test-fixture configuration, likely transitive and hellacious + return + } configuration.resolutionStrategy.failOnVersionConflict() }) // force all dependencies added directly to compile/testCompile to be non-transitive, except for ES itself Closure disableTransitiveDeps = { ModuleDependency dep -> - if (!(dep instanceof ProjectDependency) && dep.getGroup() != 'org.elasticsearch') { + if (!(dep instanceof ProjectDependency) && dep.group.startsWith('org.elasticsearch') == false) { dep.transitive = false // also create a configuration just for this dependency version, so that later @@ -302,6 +318,7 @@ class BuildPlugin implements Plugin<Project> { options.compilerArgs << '-profile' << project.compactProfile } options.encoding = 'UTF-8' + //options.incremental = true } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/EmptyDirTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/EmptyDirTask.groovy index d2059bc4719..12971b7d701 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/EmptyDirTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/EmptyDirTask.groovy @@ -19,9 +19,10 @@ package org.elasticsearch.gradle import org.gradle.api.DefaultTask -import
org.gradle.api.tasks.* +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.TaskAction import org.gradle.internal.nativeintegration.filesystem.Chmod -import java.io.File + import javax.inject.Inject /** diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/FileContentsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/FileContentsTask.groovy index 4df6d1b32df..248083af5e0 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/FileContentsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/FileContentsTask.groovy @@ -19,8 +19,9 @@ package org.elasticsearch.gradle import org.gradle.api.DefaultTask -import org.gradle.api.tasks.* -import java.io.File +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.OutputFile +import org.gradle.api.tasks.TaskAction /** * Creates a file and sets its contents to something. diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 0d936ab0e15..042e8d22529 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -22,7 +22,7 @@ import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project -import org.gradle.api.Task +import org.gradle.api.artifacts.Dependency import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip @@ -60,7 +60,7 @@ public class PluginBuildPlugin extends BuildPlugin { private static void configureDependencies(Project project) { project.dependencies { provided "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}" - testCompile "org.elasticsearch:test-framework:${project.versions.elasticsearch}" + testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}" // we "upgrade" these optional deps to provided for plugins, since they will run // with a full elasticsearch server that includes optional deps provided "com.spatial4j:spatial4j:${project.versions.spatial4j}" @@ -101,6 +101,11 @@ public class PluginBuildPlugin extends BuildPlugin { from pluginMetadata // metadata (eg custom security policy) from project.jar // this plugin's jar from project.configurations.runtime - project.configurations.provided // the dep jars + // hack just for slf4j, in case it is "upgraded" from provided to compile, + // since it is not actually provided in distributions + from project.configurations.runtime.fileCollection { Dependency dep -> + return dep.name == 'slf4j-api' && project.configurations.compile.dependencies.contains(dep) + } // extra files for the plugin to go into the zip from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging from('src/main') { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index ce6b3958eca..dd5bcaedb0b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -20,7 +20,6 @@ package org.elasticsearch.gradle.plugin import org.gradle.api.Project import org.gradle.api.tasks.Input -import org.gradle.api.tasks.Optional /** * A container for plugin
properties that will be written to the plugin descriptor, for easy diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy index e2f10100269..6fa37be309e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy @@ -18,7 +18,9 @@ */ package org.elasticsearch.gradle.precommit -import org.gradle.api.* +import org.gradle.api.DefaultTask +import org.gradle.api.GradleException +import org.gradle.api.InvalidUserDataException import org.gradle.api.file.FileCollection import org.gradle.api.tasks.Input import org.gradle.api.tasks.InputDirectory diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy index 6809adca946..7d8982e3f2d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy @@ -61,6 +61,7 @@ public class ForbiddenPatternsTask extends DefaultTask { // add mandatory rules patterns.put('nocommit', /nocommit/) patterns.put('tab', /\t/) + patterns.put('wildcard imports', /^\s*import.*\.\*/) inputs.property("excludes", filesFilter.excludes) inputs.property("rules", patterns) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy new file mode 100644 index 00000000000..39cf55c905b --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -0,0 +1,122 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.precommit + +import org.apache.rat.anttasks.Report +import org.apache.rat.anttasks.SubstringLicenseMatcher +import org.apache.rat.license.SimpleLicenseFamily +import org.elasticsearch.gradle.AntTask +import org.gradle.api.tasks.SourceSet + +import java.nio.file.Files + +/** + * Checks files for license headers. + *

+ * This is a port of the Apache Lucene check + */ +public class LicenseHeadersTask extends AntTask { + + LicenseHeadersTask() { + description = "Checks sources for missing, incorrect, or unacceptable license headers" + } + + @Override + protected void runAnt(AntBuilder ant) { + ant.project.addTaskDefinition('ratReport', Report) + ant.project.addDataTypeDefinition('substringMatcher', SubstringLicenseMatcher) + ant.project.addDataTypeDefinition('approvedLicense', SimpleLicenseFamily) + + // create a file for the log to go to under reports/ + File reportDir = new File(project.buildDir, "reports/licenseHeaders") + reportDir.mkdirs() + File reportFile = new File(reportDir, "rat.log") + Files.deleteIfExists(reportFile.toPath()) + + // run rat, going to the file + ant.ratReport(reportFile: reportFile.absolutePath, addDefaultLicenseMatchers: true) { + // checks all the java sources (allJava) + for (SourceSet set : project.sourceSets) { + for (File dir : set.allJava.srcDirs) { + // sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main... + if (dir.exists()) { + ant.fileset(dir: dir) + } + } + } + + // BSD 4-clause stuff (is disallowed below) + // we keep this here, in case someone adds BSD code for some reason; it should never be allowed. + substringMatcher(licenseFamilyCategory: "BSD4 ", + licenseFamilyName: "Original BSD License (with advertising clause)") { + pattern(substring: "All advertising materials") + } + + // Apache + substringMatcher(licenseFamilyCategory: "AL ", + licenseFamilyName: "Apache") { + // Apache license (ES) + pattern(substring: "Licensed to Elasticsearch under one or more contributor") + // Apache license (ASF) + pattern(substring: "Licensed to the Apache Software Foundation (ASF) under") + // this is the old-school one under some files + pattern(substring: "Licensed under the Apache License, Version 2.0 (the \"License\")") + } + + // Generated resources + substringMatcher(licenseFamilyCategory: "GEN ", + licenseFamilyName: "Generated") { + // parsers generated by antlr + pattern(substring: "ANTLR GENERATED CODE") + } + + // approved categories + approvedLicense(familyName: "Apache") + approvedLicense(familyName: "Generated") + } + + // check the license file for any errors, this should be fast. + boolean zeroUnknownLicenses = false + boolean foundProblemsWithFiles = false + reportFile.eachLine('UTF-8') { line -> + if (line.startsWith("0 Unknown Licenses")) { + zeroUnknownLicenses = true + } + + if (line.startsWith(" !")) { + foundProblemsWithFiles = true + } + } + + if (zeroUnknownLicenses == false || foundProblemsWithFiles) { + // print the unapproved license section, usually it's all you need to fix problems. + int sectionNumber = 0 + reportFile.eachLine('UTF-8') { line -> + if (line.startsWith("*******************************")) { + sectionNumber++ + } else { + if (sectionNumber == 2) { + logger.error(line) + } + } + } + throw new IllegalStateException("License header problems were found!
Full details: " + reportFile.absolutePath) + } + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 04878d979e9..f99032e1e2d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -34,7 +34,9 @@ class PrecommitTasks { List<Task> precommitTasks = [ configureForbiddenApis(project), project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), - project.tasks.create('jarHell', JarHellTask.class)] + project.tasks.create('licenseHeaders', LicenseHeadersTask.class), + project.tasks.create('jarHell', JarHellTask.class), + project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)] // tasks with just tests don't need dependency licenses, so this flag makes adding // the task optional diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy new file mode 100644 index 00000000000..5d06103789f --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -0,0 +1,258 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.precommit; + +import org.apache.tools.ant.BuildEvent; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.BuildListener; +import org.apache.tools.ant.BuildLogger; +import org.apache.tools.ant.DefaultLogger; +import org.apache.tools.ant.Project; +import org.elasticsearch.gradle.AntTask; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.file.FileCollection; + +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Basic static checking to keep tabs on third party JARs + */ +public class ThirdPartyAuditTask extends AntTask { + + // patterns for classes to exclude, because we understand their issues + private String[] excludes = new String[0]; + + ThirdPartyAuditTask() { + // we depend on this because it's the only reliable configuration + // this probably makes the build slower: gradle you suck here when it comes to configurations, you pay the price. + dependsOn(project.configurations.testCompile); + description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors"; + } + + /** + * classes that should be excluded from the scan, + * e.g.
because we know what sheisty stuff those particular classes are up to. + */ + public void setExcludes(String[] classes) { + for (String s : classes) { + if (s.indexOf('*') != -1) { + throw new IllegalArgumentException("illegal third party audit exclusion: '" + s + "', wildcards are not permitted!"); + } + } + excludes = classes; + } + + /** + * Returns current list of exclusions. + */ + public String[] getExcludes() { + return excludes; + } + + // yes, we parse Uwe Schindler's errors to find missing classes, and to keep a continuous audit. Just don't let him know! + static final Pattern MISSING_CLASS_PATTERN = + Pattern.compile(/WARNING: The referenced class '(.*)' cannot be loaded\. Please fix the classpath\!/); + + static final Pattern VIOLATION_PATTERN = + Pattern.compile(/\s\sin ([a-zA-Z0-9\$\.]+) \(.*\)/); + + // we log everything and capture errors and handle them with our whitelist + // this is important, as we detect stale whitelist entries, workaround forbidden apis bugs, + // and it also allows whitelisting missing classes! + static class EvilLogger extends DefaultLogger { + final Set<String> missingClasses = new TreeSet<>(); + final Map<String, List<String>> violations = new TreeMap<>(); + String previousLine = null; + + @Override + public void messageLogged(BuildEvent event) { + if (event.getTask().getClass() == de.thetaphi.forbiddenapis.ant.AntTask.class) { + if (event.getPriority() == Project.MSG_WARN) { + Matcher m = MISSING_CLASS_PATTERN.matcher(event.getMessage()); + if (m.matches()) { + missingClasses.add(m.group(1).replace('.', '/') + ".class"); + } + } else if (event.getPriority() == Project.MSG_ERR) { + Matcher m = VIOLATION_PATTERN.matcher(event.getMessage()); + if (m.matches()) { + String violation = previousLine + '\n' + event.getMessage(); + String clazz = m.group(1).replace('.', '/') + ".class"; + List<String> current = violations.get(clazz); + if (current == null) { + current = new ArrayList<>(); + violations.put(clazz, current); + } + current.add(violation); + } + previousLine = event.getMessage(); + } + } + super.messageLogged(event); + } + } + + @Override + protected BuildLogger makeLogger(PrintStream stream, int outputLevel) { + DefaultLogger log = new EvilLogger(); + log.errorPrintStream = stream; + log.outputPrintStream = stream; + log.messageOutputLevel = outputLevel; + return log; + } + + @Override + protected void runAnt(AntBuilder ant) { + Configuration configuration = project.configurations.findByName('runtime'); + if (configuration == null) { + // some projects apparently do not have 'runtime'? what a nice inconsistency, + // basically only serves to waste time in build logic! + configuration = project.configurations.findByName('testCompile'); + } + assert configuration != null; + ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask); + + // we only want third party dependencies. + FileCollection jars = configuration.fileCollection({ dependency -> + dependency.group.startsWith("org.elasticsearch") == false + }); + + // we don't want provided dependencies, which we have already scanned. e.g. don't + // scan ES core's dependencies for every single plugin + Configuration provided = project.configurations.findByName('provided'); + if (provided != null) { + jars -= provided; + } + + // no dependencies matched, we are done + if (jars.isEmpty()) { + return; + } + + // print which jars we are going to scan, always + // this is not the time to try to be succinct! Forbidden will print plenty on its own!
+ Set<String> names = new TreeSet<>(); + for (File jar : jars) { + names.add(jar.getName()); + } + logger.error("[thirdPartyAudit] Scanning: " + names); + + // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first, + // and then remove our temp dir afterwards. don't complain: try it yourself. + // we don't use gradle temp dir handling, just google it, or try it yourself. + + File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit'); + + // clean up any previous mess (if we failed), then unzip everything to one directory + ant.delete(dir: tmpDir.getAbsolutePath()); + tmpDir.mkdirs(); + for (File jar : jars) { + ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath()); + } + + // convert exclusion class names to binary file names + String[] excludedFiles = new String[excludes.length]; + for (int i = 0; i < excludes.length; i++) { + excludedFiles[i] = excludes[i].replace('.', '/') + ".class"; + } + Set<String> excludedSet = new TreeSet<>(Arrays.asList(excludedFiles)); + + // jarHellReprise + Set<String> sheistySet = getSheistyClasses(tmpDir.toPath()); + + try { + ant.thirdPartyAudit(internalRuntimeForbidden: false, + failOnUnsupportedJava: false, + failOnMissingClasses: false, + signaturesFile: new File(getClass().getResource('/forbidden/third-party-audit.txt').toURI()), + classpath: configuration.asPath) { + fileset(dir: tmpDir) + } + } catch (BuildException ignore) {} + + EvilLogger evilLogger = null; + for (BuildListener listener : ant.project.getBuildListeners()) { + if (listener instanceof EvilLogger) { + evilLogger = (EvilLogger) listener; + break; + } + } + assert evilLogger != null; + + // keep our whitelist up to date + Set<String> bogusExclusions = new TreeSet<>(excludedSet); + bogusExclusions.removeAll(sheistySet); + bogusExclusions.removeAll(evilLogger.missingClasses); + bogusExclusions.removeAll(evilLogger.violations.keySet()); + if (!bogusExclusions.isEmpty()) { + throw new IllegalStateException("Invalid exclusions, nothing is wrong with these classes: " + bogusExclusions); + } + + // don't duplicate classes with the JDK + sheistySet.removeAll(excludedSet); + if (!sheistySet.isEmpty()) { + throw new IllegalStateException("JAR HELL WITH JDK! " + sheistySet); + } + + // don't allow a broken classpath + evilLogger.missingClasses.removeAll(excludedSet); + if (!evilLogger.missingClasses.isEmpty()) { + throw new IllegalStateException("CLASSES ARE MISSING! " + evilLogger.missingClasses); + } + + // don't use internal classes + evilLogger.violations.keySet().removeAll(excludedSet); + if (!evilLogger.violations.isEmpty()) { + throw new IllegalStateException("VIOLATIONS WERE FOUND! " + evilLogger.violations); + } + + // clean up our mess (if we succeed) + ant.delete(dir: tmpDir.getAbsolutePath()); + } + + /** + * check for sheisty classes: if they also exist in the extensions classloader, it's jar hell with the jdk! + */ + private Set<String> getSheistyClasses(Path root) { + // system.parent = extensions loader. + // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!). + // but groovy/gradle needs to work at all first!
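+ // on Java 8 the parent of the system classloader is the extension classloader, which (like the bootstrap loader) serves JDK-shipped classes, so any third-party .class that also resolves through it duplicates a class the JDK already provides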
+ ClassLoader ext = ClassLoader.getSystemClassLoader().getParent(); + assert ext != null; + + Set<String> sheistySet = new TreeSet<>(); + Files.walkFileTree(root, new SimpleFileVisitor<Path>() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + String entry = root.relativize(file).toString().replace('\\', '/'); + if (entry.endsWith(".class")) { + if (ext.getResource(entry) != null) { + sheistySet.add(entry); + } + } + return FileVisitResult.CONTINUE; + } + }); + return sheistySet; + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 08976dbdb39..4b7c05ec2eb 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -23,11 +23,18 @@ import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.plugin.PluginBuildPlugin -import org.gradle.api.* +import org.gradle.api.AntBuilder +import org.gradle.api.DefaultTask +import org.gradle.api.GradleException +import org.gradle.api.InvalidUserDataException +import org.gradle.api.Project +import org.gradle.api.Task import org.gradle.api.artifacts.Configuration import org.gradle.api.file.FileCollection import org.gradle.api.logging.Logger -import org.gradle.api.tasks.* +import org.gradle.api.tasks.Copy +import org.gradle.api.tasks.Delete +import org.gradle.api.tasks.Exec import java.nio.file.Paths diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy new file mode 100644 index 00000000000..46b81624ba3 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy @@ -0,0 +1,287 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.test + +import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.AntTask +import org.elasticsearch.gradle.LoggedExec +import org.gradle.api.GradleException +import org.gradle.api.Task +import org.gradle.api.tasks.Exec +import org.gradle.api.tasks.Input + +/** + * A fixture for integration tests which runs in a separate process. + */ +public class Fixture extends AntTask { + + /** The path to the executable that starts the fixture. */ + @Input + String executable + + private final List<Object> arguments = new ArrayList<>() + + @Input + public void args(Object... args) { + arguments.addAll(args) + } + + /** + * Environment variables for the fixture process.
The value can be any object, which + * will have toString() called at execution time. + */ + private final Map<String, Object> environment = new HashMap<>() + + @Input + public void env(String key, Object value) { + environment.put(key, value) + } + + /** A flag to indicate whether the command should be executed from a shell. */ + @Input + boolean useShell = false + + /** + * A flag to indicate whether the fixture should be run in the foreground, or spawned. + * It is protected so subclasses can override (eg RunTask). + */ + protected boolean spawn = true + + /** + * A closure to call before the fixture is considered ready. The closure is passed the fixture object, + * as well as a groovy AntBuilder, to enable running ant condition checks. The default wait + * condition is for http on the http port. + */ + @Input + Closure waitCondition = { Fixture fixture, AntBuilder ant -> + File tmpFile = new File(fixture.cwd, 'wait.success') + ant.get(src: "http://${fixture.addressAndPort}", + dest: tmpFile.toString(), + ignoreerrors: true, // do not fail on error, so logging information can be flushed + retries: 10) + return tmpFile.exists() + } + + /** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. */ + public final Task stopTask + + public Fixture() { + stopTask = createStopTask() + finalizedBy(stopTask) + } + + @Override + protected void runAnt(AntBuilder ant) { + project.delete(baseDir) // reset everything + cwd.mkdirs() + final String realExecutable + final List<Object> realArgs = new ArrayList<>() + final Map<String, Object> realEnv = environment + // We need to choose which executable we are using. In shell mode, or when we + // are spawning and thus using the wrapper script, the executable is the shell. + if (useShell || spawn) { + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + realExecutable = 'cmd' + realArgs.add('/C') + realArgs.add('"') // quote the entire command + } else { + realExecutable = 'sh' + } + } else { + realExecutable = executable + realArgs.addAll(arguments) + } + if (spawn) { + writeWrapperScript(executable) + realArgs.add(wrapperScript) + realArgs.addAll(arguments) + } + if (Os.isFamily(Os.FAMILY_WINDOWS) && (useShell || spawn)) { + realArgs.add('"') + } + commandString.eachLine { line -> logger.info(line) } + + ant.exec(executable: realExecutable, spawn: spawn, dir: cwd, taskname: name) { + realEnv.each { key, value -> env(key: key, value: value) } + realArgs.each { arg(value: it) } + } + + String failedProp = "failed${name}" + // first wait for resources, or the failure marker from the wrapper script + ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: failedProp) { + or { + resourceexists { + file(file: failureMarker.toString()) + } + and { + resourceexists { + file(file: pidFile.toString()) + } + resourceexists { + file(file: portsFile.toString()) + } + } + } + } + + if (ant.project.getProperty(failedProp) || failureMarker.exists()) { + fail("Failed to start ${name}") + } + + // the process is started (has a pid) and is bound to a network interface + // so now wait until the waitCondition has been met + // TODO: change this to a loop?
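+ // at this point ant.waitfor above has seen either the failure marker or both the pid and ports files, so the process is at least started and bound; the configurable waitCondition now decides whether the fixture is actually ready for use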
+ boolean success + try { + success = waitCondition(this, ant) + } catch (Exception e) { + String msg = "Wait condition caught exception for ${name}" + logger.error(msg, e) + fail(msg, e) + } + if (success == false) { + fail("Wait condition failed for ${name}") + } + } + + /** Returns a debug string used to log information about how the fixture was run. */ + protected String getCommandString() { + String commandString = "\n${name} configuration:\n" + commandString += "-----------------------------------------\n" + commandString += " cwd: ${cwd}\n" + commandString += " command: ${executable} ${arguments.join(' ')}\n" + commandString += ' environment:\n' + environment.each { k, v -> commandString += " ${k}: ${v}\n" } + if (spawn) { + commandString += "\n [${wrapperScript.name}]\n" + wrapperScript.eachLine('UTF-8', { line -> commandString += " ${line}\n"}) + } + return commandString + } + + /** + * Writes a script to run the real executable, so that stdout/stderr can be captured. + * TODO: this could be removed if we do use our own ProcessBuilder and pump output from the process + */ + private void writeWrapperScript(String executable) { + wrapperScript.parentFile.mkdirs() + String argsPasser = '"$@"' + String exitMarker = "; if [ \$? != 0 ]; then touch run.failed; fi" + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + argsPasser = '%*' + exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )" + } + wrapperScript.setText("\"${executable}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8') + } + + /** Fail the build with the given message, logging relevant info. */ + private void fail(String msg, Exception... suppressed) { + if (logger.isInfoEnabled() == false) { + // We already log the command at info level. No need to do it twice. + commandString.eachLine { line -> logger.error(line) } + } + logger.error("${name} output:") + logger.error("-----------------------------------------") + logger.error(" failure marker exists: ${failureMarker.exists()}") + logger.error(" pid file exists: ${pidFile.exists()}") + logger.error(" ports file exists: ${portsFile.exists()}") + // also dump the log file for the startup script (which will include ES logging output to stdout) + if (runLog.exists()) { + logger.error("\n [log]") + runLog.eachLine { line -> logger.error(" ${line}") } + } + logger.error("-----------------------------------------") + GradleException toThrow = new GradleException(msg) + for (Exception e : suppressed) { + toThrow.addSuppressed(e) + } + throw toThrow + } + + /** Adds a task to kill an elasticsearch node with the given pidfile */ + private Task createStopTask() { + final Fixture fixture = this + final Object pid = "${ -> fixture.pid }" + Exec stop = project.tasks.create(name: "${name}#stop", type: LoggedExec) + stop.onlyIf { fixture.pidFile.exists() } + stop.doFirst { + logger.info("Shutting down ${fixture.name} with pid ${pid}") + } + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + stop.executable = 'Taskkill' + stop.args('/PID', pid, '/F') + } else { + stop.executable = 'kill' + stop.args('-9', pid) + } + stop.doLast { + project.delete(fixture.pidFile) + } + return stop + } + + /** + * A path relative to the build dir that all configuration and runtime files + * will live in for this fixture + */ + protected File getBaseDir() { + return new File(project.buildDir, "fixtures/${name}") + } + + /** Returns the working directory for the process. Defaults to "cwd" inside baseDir.
*/ + protected File getCwd() { + return new File(baseDir, 'cwd') + } + + /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */ + protected File getPidFile() { + return new File(baseDir, 'pid') + } + + /** Reads the pid file and returns the process' pid */ + public int getPid() { + return Integer.parseInt(pidFile.getText('UTF-8').trim()) + } + + /** Returns the file the process writes its bound ports to. Defaults to "ports" inside baseDir. */ + protected File getPortsFile() { + return new File(baseDir, 'ports') + } + + /** Returns an address and port suitable for a uri to connect to this node over http */ + public String getAddressAndPort() { + return portsFile.readLines("UTF-8").get(0) + } + + /** Returns a file that wraps around the actual command when {@code spawn == true}. */ + protected File getWrapperScript() { + return new File(cwd, Os.isFamily(Os.FAMILY_WINDOWS) ? 'run.bat' : 'run') + } + + /** Returns a file that the wrapper script writes when the command failed. */ + protected File getFailureMarker() { + return new File(cwd, 'run.failed') + } + + /** Returns the file the wrapper script redirects the command's stdout and stderr to. */ + protected File getRunLog() { + return new File(cwd, 'run.log') + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 24bd57a3a59..5656be57b8f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -20,7 +20,7 @@ package org.elasticsearch.gradle.test import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin -import org.gradle.api.Project +import org.gradle.api.GradleException import org.gradle.api.Task import org.gradle.api.internal.tasks.options.Option import org.gradle.api.plugins.JavaBasePlugin @@ -82,4 +82,25 @@ public class RestIntegTestTask extends RandomizedTestingTask { public ClusterConfiguration getCluster() { return clusterConfig } + + @Override + public Task dependsOn(Object...
dependencies) { + super.dependsOn(dependencies) + for (Object dependency : dependencies) { + if (dependency instanceof Fixture) { + finalizedBy(((Fixture)dependency).stopTask) + } + } + return this + } + + @Override + public void setDependsOn(Iterable<?> dependencies) { + super.setDependsOn(dependencies) + for (Object dependency : dependencies) { + if (dependency instanceof Fixture) { + finalizedBy(((Fixture)dependency).stopTask) + } + } + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy index 842ef8c35cd..bebed415ad8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy @@ -1,7 +1,6 @@ package org.elasticsearch.gradle.test import org.gradle.api.DefaultTask -import org.gradle.api.Project import org.gradle.api.Task import org.gradle.api.internal.tasks.options.Option import org.gradle.util.ConfigureUtil diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy index f317254cd45..af2b20e4abf 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy @@ -27,7 +27,6 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.plugins.JavaBasePlugin -import org.gradle.plugins.ide.eclipse.model.EclipseClasspath /** Configures the build to have a rest integration test. */ public class StandaloneTestBasePlugin implements Plugin<Project> { @@ -42,7 +41,7 @@ public class StandaloneTestBasePlugin implements Plugin<Project> { // only setup tests to build project.sourceSets.create('test') - project.dependencies.add('testCompile', "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}") + project.dependencies.add('testCompile', "org.elasticsearch.test:framework:${VersionProperties.elasticsearch}") project.eclipse.classpath.sourceSets = [project.sourceSets.test] project.eclipse.classpath.plusConfigurations = [project.configurations.testRuntime] diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy index 6af9edd119c..2f2030f6cd2 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy @@ -19,8 +19,7 @@ package org.elasticsearch.gradle.vagrant import org.gradle.api.DefaultTask -import org.gradle.api.tasks.* -import org.gradle.logging.ProgressLogger +import org.gradle.api.tasks.TaskAction import org.gradle.logging.ProgressLoggerFactory import org.gradle.process.internal.ExecAction import org.gradle.process.internal.ExecActionFactory diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy index 351c34a893b..92b4a575eba 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy @@ -19,8 +19,7 @@ package org.elasticsearch.gradle.vagrant import
org.gradle.api.DefaultTask -import org.gradle.api.tasks.* -import org.gradle.logging.ProgressLogger +import org.gradle.api.tasks.TaskAction import org.gradle.logging.ProgressLoggerFactory import org.gradle.process.internal.ExecAction import org.gradle.process.internal.ExecActionFactory diff --git a/buildSrc/src/main/resources/forbidden/third-party-audit.txt b/buildSrc/src/main/resources/forbidden/third-party-audit.txt new file mode 100644 index 00000000000..0346d6d8a58 --- /dev/null +++ b/buildSrc/src/main/resources/forbidden/third-party-audit.txt @@ -0,0 +1,98 @@ +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on +# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +# Checks that we run against bytecode of third-party dependencies +# +# Be judicious about what is denied here: MANY classes will be subject +# to these rules, so please try to keep the false positive rate low! +# +# Each third party .class failing checks will need to be explicitly +# listed in the module's build.gradle file: +# +# thirdPartyAudit.excludes = [ +# // uses internal java api: sun.misc.Unsafe +# 'org.foo.Bar', +# // missing class! +# 'com.missing.dependency.WTF', +# // ... +# ] +# +# Wildcards are not allowed, excludes must be exact. The build also fails with +# the message "Invalid exclusions, nothing is wrong with these classes" if +# extraneous classes are in the excludes list, this ensures the list is +# up-to-date, and that each module accurately documents the evil things +# that its dependencies do. 
+# +# For more information, look at ThirdPartyAuditTask.groovy in buildSrc/ + +# +# Ruleset to fail on java internal apis, using this logic: +# http://docs.oracle.com/javase/8/docs/api/java/lang/SecurityManager.html#checkPackageAccess-java.lang.String- +# +# // The list may change at any time, regenerated with: +# for (String pkg : new TreeSet<>(Arrays.asList( +# Security.getProperty("package.access").split(",")))) { +# System.out.println(pkg + "**"); +# } +# +@defaultMessage non-public internal runtime class +com.oracle.webservices.internal.** +com.oracle.xmlns.internal.** +com.sun.activation.registries.** +com.sun.browser.** +com.sun.corba.se.** +com.sun.glass.** +com.sun.imageio.** +com.sun.istack.internal.** +com.sun.javafx.** +com.sun.jmx.** +com.sun.media.** +com.sun.media.sound.** +com.sun.naming.internal.** +com.sun.openpisces.** +com.sun.org.apache.bcel.internal.** +com.sun.org.apache.regexp.internal.** +com.sun.org.apache.xalan.internal.extensions.** +com.sun.org.apache.xalan.internal.lib.** +com.sun.org.apache.xalan.internal.res.** +com.sun.org.apache.xalan.internal.templates.** +com.sun.org.apache.xalan.internal.utils.** +com.sun.org.apache.xalan.internal.xslt.** +com.sun.org.apache.xalan.internal.xsltc.cmdline.** +com.sun.org.apache.xalan.internal.xsltc.compiler.** +com.sun.org.apache.xalan.internal.xsltc.trax.** +com.sun.org.apache.xalan.internal.xsltc.util.** +com.sun.org.apache.xerces.internal.** +com.sun.org.apache.xml.internal.res.** +com.sun.org.apache.xml.internal.security.** +com.sun.org.apache.xml.internal.serializer.utils.** +com.sun.org.apache.xml.internal.utils.** +com.sun.org.apache.xpath.internal.** +com.sun.org.glassfish.** +com.sun.pisces.** +com.sun.prism.** +com.sun.proxy.** +com.sun.scenario.** +com.sun.t2k.** +com.sun.webkit.** +com.sun.xml.internal.** +jdk.internal.** +jdk.management.resource.internal.** +jdk.nashorn.internal.** +jdk.nashorn.tools.** +oracle.jrockit.jfr.** +org.jcp.xml.dsig.internal.** +sun.** diff --git a/buildSrc/version.properties b/buildSrc/version.properties index e33383afa23..e073730fe12 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 3.0.0-SNAPSHOT -lucene = 5.5.0-snapshot-1719088 +lucene = 5.5.0-snapshot-1721183 # optional dependencies spatial4j = 0.5 diff --git a/core/build.gradle b/core/build.gradle index fd8a0c10f5a..7b80449729e 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -17,9 +17,9 @@ * under the License. */ + import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin -import org.elasticsearch.gradle.test.RestSpecHack apply plugin: 'elasticsearch.build' apply plugin: 'com.bmuschko.nexus' @@ -82,7 +82,7 @@ dependencies { compile "net.java.dev.jna:jna:${versions.jna}", optional if (isEclipse == false || project.path == ":core-tests") { - testCompile("org.elasticsearch:test-framework:${version}") { + testCompile("org.elasticsearch.test:framework:${version}") { // tests use the locally compiled version of core exclude group: 'org.elasticsearch', module: 'elasticsearch' } @@ -111,6 +111,123 @@ forbiddenPatterns { exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt' } +thirdPartyAudit.excludes = [ + // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + 'org.jboss.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + + // classes are missing! 
+ + // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml) + 'com.fasterxml.jackson.databind.ObjectMapper', + + // from org.jboss.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder (netty) + 'com.google.protobuf.CodedInputStream', + + // from org.jboss.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender (netty) + 'com.google.protobuf.CodedOutputStream', + + // from org.jboss.netty.handler.codec.protobuf.ProtobufDecoder (netty) + 'com.google.protobuf.ExtensionRegistry', + 'com.google.protobuf.MessageLite$Builder', + 'com.google.protobuf.MessageLite', + 'com.google.protobuf.Parser', + + // from org.apache.log4j.receivers.net.JMSReceiver (log4j-extras) + 'javax.jms.Message', + 'javax.jms.MessageListener', + 'javax.jms.ObjectMessage', + 'javax.jms.TopicConnection', + 'javax.jms.TopicConnectionFactory', + 'javax.jms.TopicPublisher', + 'javax.jms.TopicSession', + 'javax.jms.TopicSubscriber', + + // from org.apache.log4j.net.SMTPAppender (log4j) + 'javax.mail.Authenticator', + 'javax.mail.Message$RecipientType', + 'javax.mail.Message', + 'javax.mail.Multipart', + 'javax.mail.PasswordAuthentication', + 'javax.mail.Session', + 'javax.mail.Transport', + 'javax.mail.internet.InternetAddress', + 'javax.mail.internet.InternetHeaders', + 'javax.mail.internet.MimeBodyPart', + 'javax.mail.internet.MimeMessage', + 'javax.mail.internet.MimeMultipart', + 'javax.mail.internet.MimeUtility', + + // from org.jboss.netty.channel.socket.http.HttpTunnelingServlet (netty) + 'javax.servlet.ServletConfig', + 'javax.servlet.ServletException', + 'javax.servlet.ServletOutputStream', + 'javax.servlet.http.HttpServlet', + 'javax.servlet.http.HttpServletRequest', + 'javax.servlet.http.HttpServletResponse', + + // from org.jboss.netty.logging.CommonsLoggerFactory (netty) + 'org.apache.commons.logging.Log', + 'org.apache.commons.logging.LogFactory', + + // from org.apache.lucene.sandbox.queries.regex.JakartaRegexpCapabilities$JakartaRegexMatcher (lucene-sandbox) + 'org.apache.regexp.CharacterIterator', + 'org.apache.regexp.RE', + 'org.apache.regexp.REProgram', + + // from org.jboss.netty.handler.ssl.OpenSslEngine (netty) + 'org.apache.tomcat.jni.Buffer', + 'org.apache.tomcat.jni.Library', + 'org.apache.tomcat.jni.Pool', + 'org.apache.tomcat.jni.SSL', + 'org.apache.tomcat.jni.SSLContext', + + // from org.jboss.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) + 'org.bouncycastle.asn1.x500.X500Name', + 'org.bouncycastle.cert.X509v3CertificateBuilder', + 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', + 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', + 'org.bouncycastle.jce.provider.BouncyCastleProvider', + 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', + + // from org.jboss.netty.handler.ssl.JettyNpnSslEngine (netty) + 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', + 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', + + // from org.jboss.netty.logging.JBossLoggerFactory (netty) + 'org.jboss.logging.Logger', + + // from org.jboss.netty.handler.codec.marshalling.ChannelBufferByteInput (netty) + 'org.jboss.marshalling.ByteInput', + + // from org.jboss.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty) + 'org.jboss.marshalling.ByteOutput', + + // from org.jboss.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty) + 'org.jboss.marshalling.Marshaller', + + // from org.jboss.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty) + 
'org.jboss.marshalling.MarshallerFactory',
+  'org.jboss.marshalling.MarshallingConfiguration',
+  'org.jboss.marshalling.Unmarshaller',
+
+  // from com.spatial4j.core.io.GeoJSONReader (spatial4j)
+  'org.noggit.JSONParser',
+
+  // from org.jboss.netty.container.osgi.NettyBundleActivator (netty)
+  'org.osgi.framework.BundleActivator',
+  'org.osgi.framework.BundleContext',
+
+  // from org.jboss.netty.logging.OsgiLoggerFactory$1 (netty)
+  'org.osgi.framework.ServiceReference',
+  'org.osgi.service.log.LogService',
+  'org.osgi.util.tracker.ServiceTracker',
+  'org.osgi.util.tracker.ServiceTrackerCustomizer',
+
+  'org.slf4j.impl.StaticMDCBinder',
+  'org.slf4j.impl.StaticMarkerBinder',
+]
+
 // dependency licenses are currently checked in distribution
 dependencyLicenses.enabled = false
diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
index 81f49055223..645929d3992 100644
--- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
+++ b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
@@ -18,9 +18,19 @@
  */
 package org.apache.lucene.queries;
 
-import org.apache.lucene.index.*;
-import org.apache.lucene.search.*;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermContext;
+import org.apache.lucene.index.TermState;
+import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.BoostQuery;
+import org.apache.lucene.search.DisjunctionMaxQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.InPlaceMergeSorter;
 import org.apache.lucene.util.ToStringUtils;
@@ -44,7 +54,7 @@ import java.util.Objects;
 * While aggregating the total term frequency is trivial since it
 * can be summed up, not every {@link org.apache.lucene.search.similarities.Similarity}
 * makes use of this statistic. The document frequency, which is used in the
- * {@link org.apache.lucene.search.similarities.DefaultSimilarity}
+ * {@link org.apache.lucene.search.similarities.ClassicSimilarity}
 * can only be estimated as a lower bound since it is a document-based statistic. For
 * the document frequency, the maximum frequency across all fields per term is used,
 * which is the minimum number of documents the term occurs in.
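A minimal sketch of the document-frequency estimate described in the javadoc above: the blended df is the maximum per-field docFreq, a lower bound on the number of documents containing the term in any field. Illustrative only; BlendedDfEstimate and lowerBoundDf are hypothetical names, not part of this change.

---------------------------------------------------------------------------
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;

import java.io.IOException;

// Illustrative sketch: Lucene tracks docFreq per field, so the blended
// document frequency can only be estimated. Taking the maximum across the
// blended fields yields the minimum number of documents that contain the
// term in at least one of those fields.
final class BlendedDfEstimate {
    static int lowerBoundDf(IndexReader reader, String termText, String... fields) throws IOException {
        int max = 0;
        for (String field : fields) {
            max = Math.max(max, reader.docFreq(new Term(field, termText)));
        }
        return max;
    }
}
---------------------------------------------------------------------------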
diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 9f2b1b66221..89eedce09d8 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -23,7 +23,14 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.MultiPhraseQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.lucene.search.Queries; @@ -35,7 +42,12 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded; @@ -214,7 +226,7 @@ public class MapperQueryParser extends QueryParser { } } if (query == null) { - query = super.getFieldQuery(currentFieldType.names().indexName(), queryText, quoted); + query = super.getFieldQuery(currentFieldType.name(), queryText, quoted); } return query; } @@ -454,7 +466,7 @@ public class MapperQueryParser extends QueryParser { query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, context); } if (query == null) { - query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr); + query = getPossiblyAnalyzedPrefixQuery(currentFieldType.name(), termStr); } return query; } @@ -580,7 +592,7 @@ public class MapperQueryParser extends QueryParser { if (!settings.forceAnalyzer()) { setAnalyzer(context.getSearchAnalyzer(currentFieldType)); } - indexedNameField = currentFieldType.names().indexName(); + indexedNameField = currentFieldType.name(); return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); } return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); diff --git a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java index 95657de5158..11b56bdcfe1 100644 --- a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java +++ b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java @@ -22,7 +22,11 @@ package org.apache.lucene.search.vectorhighlight; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MultiPhraseQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; 
import org.apache.lucene.search.spans.SpanTermQuery; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; @@ -72,10 +76,10 @@ public class CustomFieldQuery extends FieldQuery { super.flatten(sourceQuery, reader, flatQueries, boost); } } - + private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, List terms, int[] pos, IndexReader reader, Collection flatQueries) throws IOException { if (currentPos == 0) { - // if we have more than 16 terms + // if we have more than 16 terms int numTerms = 0; for (Term[] currentPosTerm : terms) { numTerms += currentPosTerm.length; @@ -83,7 +87,7 @@ public class CustomFieldQuery extends FieldQuery { if (numTerms > 16) { for (Term[] currentPosTerm : terms) { for (Term term : currentPosTerm) { - super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost()); + super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost()); } } return; diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 18376aff88f..9f9dbf18869 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -30,7 +30,13 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; /** diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index a5e2e38ca26..ac2575597e8 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -268,13 +268,19 @@ public class Version { public static final int V_2_0_1_ID = 2000199; public static final Version V_2_0_1 = new Version(V_2_0_1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1); public static final int V_2_0_2_ID = 2000299; - public static final Version V_2_0_2 = new Version(V_2_0_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1); + public static final Version V_2_0_2 = new Version(V_2_0_2_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1); + public static final int V_2_0_3_ID = 2000399; + public static final Version V_2_0_3 = new Version(V_2_0_3_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1); public static final int V_2_1_0_ID = 2010099; public static final Version V_2_1_0 = new Version(V_2_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1); public static final int V_2_1_1_ID = 2010199; - public static final Version V_2_1_1 = new Version(V_2_1_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1); + public static final Version V_2_1_1 = new Version(V_2_1_1_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1); + public static final int V_2_1_2_ID = 2010299; + public static final Version V_2_1_2 = new Version(V_2_1_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1); public static final int V_2_2_0_ID = 2020099; public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0); + public static final int V_2_3_0_ID = 2030099; + public static final Version V_2_3_0 = new Version(V_2_3_0_ID, true, 
org.apache.lucene.util.Version.LUCENE_5_4_0); public static final int V_3_0_0_ID = 3000099; public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final Version CURRENT = V_3_0_0; @@ -291,12 +297,18 @@ public class Version { switch (id) { case V_3_0_0_ID: return V_3_0_0; + case V_2_3_0_ID: + return V_2_3_0; case V_2_2_0_ID: return V_2_2_0; + case V_2_1_2_ID: + return V_2_1_2; case V_2_1_1_ID: return V_2_1_1; case V_2_1_0_ID: return V_2_1_0; + case V_2_0_3_ID: + return V_2_0_3; case V_2_0_2_ID: return V_2_0_2; case V_2_0_1_ID: diff --git a/core/src/main/java/org/elasticsearch/action/ActionFuture.java b/core/src/main/java/org/elasticsearch/action/ActionFuture.java index 2d5f6781d71..1bd5d16b03d 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionFuture.java +++ b/core/src/main/java/org/elasticsearch/action/ActionFuture.java @@ -19,8 +19,6 @@ package org.elasticsearch.action; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.TimeValue; import java.util.concurrent.Future; diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 88ccb809712..11cafb326a0 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; @@ -58,6 +60,8 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction; import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; +import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction; +import org.elasticsearch.action.admin.cluster.validate.template.TransportRenderSearchTemplateAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction; @@ -79,7 +83,9 @@ import org.elasticsearch.action.admin.indices.exists.indices.TransportIndicesExi import org.elasticsearch.action.admin.indices.exists.types.TransportTypesExistsAction; import org.elasticsearch.action.admin.indices.exists.types.TypesExistsAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; +import 
org.elasticsearch.action.admin.indices.flush.TransportSyncedFlushAction;
 import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
 import org.elasticsearch.action.admin.indices.forcemerge.TransportForceMergeAction;
 import org.elasticsearch.action.admin.indices.get.GetIndexAction;
@@ -121,8 +127,6 @@ import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction;
 import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsAction;
 import org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction;
 import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
-import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction;
-import org.elasticsearch.action.admin.cluster.validate.template.TransportRenderSearchTemplateAction;
 import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerAction;
 import org.elasticsearch.action.admin.indices.warmer.delete.TransportDeleteWarmerAction;
 import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersAction;
@@ -253,6 +257,7 @@ public class ActionModule extends AbstractModule {
         registerAction(NodesInfoAction.INSTANCE, TransportNodesInfoAction.class);
         registerAction(NodesStatsAction.INSTANCE, TransportNodesStatsAction.class);
         registerAction(NodesHotThreadsAction.INSTANCE, TransportNodesHotThreadsAction.class);
+        registerAction(ListTasksAction.INSTANCE, TransportListTasksAction.class);
 
         registerAction(ClusterStatsAction.INSTANCE, TransportClusterStatsAction.class);
         registerAction(ClusterStateAction.INSTANCE, TransportClusterStateAction.class);
@@ -293,6 +298,7 @@ public class ActionModule extends AbstractModule {
         registerAction(ValidateQueryAction.INSTANCE, TransportValidateQueryAction.class);
         registerAction(RefreshAction.INSTANCE, TransportRefreshAction.class);
         registerAction(FlushAction.INSTANCE, TransportFlushAction.class);
+        registerAction(SyncedFlushAction.INSTANCE, TransportSyncedFlushAction.class);
         registerAction(ForceMergeAction.INSTANCE, TransportForceMergeAction.class);
         registerAction(UpgradeAction.INSTANCE, TransportUpgradeAction.class);
         registerAction(UpgradeStatusAction.INSTANCE, TransportUpgradeStatusAction.class);
diff --git a/core/src/main/java/org/elasticsearch/action/ActionRequest.java b/core/src/main/java/org/elasticsearch/action/ActionRequest.java
index 24cf68025ba..45e7e76ca0b 100644
--- a/core/src/main/java/org/elasticsearch/action/ActionRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/ActionRequest.java
@@ -38,7 +38,7 @@ public abstract class ActionRequest extends TransportRe
         super(request);
         // this does not set the listenerThreaded API; if needed, it's up to the caller to set it
         // since most times, we actually want it to not be threaded...
- //this.listenerThreaded = request.listenerThreaded(); + // this.listenerThreaded = request.listenerThreaded(); } public abstract ActionRequestValidationException validate(); diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 009d3fc47a9..c0389c6c194 100644 --- a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -21,7 +21,6 @@ package org.elasticsearch.action; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.shard.ShardId; diff --git a/core/src/main/java/org/elasticsearch/action/DocumentRequest.java b/core/src/main/java/org/elasticsearch/action/DocumentRequest.java index fcfea39ab54..a90f013a6b9 100644 --- a/core/src/main/java/org/elasticsearch/action/DocumentRequest.java +++ b/core/src/main/java/org/elasticsearch/action/DocumentRequest.java @@ -62,4 +62,12 @@ public interface DocumentRequest extends IndicesRequest { * @return the Routing */ String routing(); + + + /** + * Get the parent for this request + * @return the Parent + */ + String parent(); + } diff --git a/core/src/main/java/org/elasticsearch/action/NoSuchNodeException.java b/core/src/main/java/org/elasticsearch/action/NoSuchNodeException.java index a3e7474ea6b..3f8ce1441c5 100644 --- a/core/src/main/java/org/elasticsearch/action/NoSuchNodeException.java +++ b/core/src/main/java/org/elasticsearch/action/NoSuchNodeException.java @@ -19,7 +19,6 @@ package org.elasticsearch.action; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/action/TaskOperationFailure.java b/core/src/main/java/org/elasticsearch/action/TaskOperationFailure.java new file mode 100644 index 00000000000..bf5051c1a19 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/TaskOperationFailure.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +import static org.elasticsearch.ExceptionsHelper.detailedMessage; + +/** + * Information about task operation failures + * + * The class is final due to serialization limitations + */ +public final class TaskOperationFailure implements Writeable, ToXContent { + + private final String nodeId; + + private final long taskId; + + private final Throwable reason; + + private final RestStatus status; + + public TaskOperationFailure(StreamInput in) throws IOException { + nodeId = in.readString(); + taskId = in.readLong(); + reason = in.readThrowable(); + status = RestStatus.readFrom(in); + } + + public TaskOperationFailure(String nodeId, long taskId, Throwable t) { + this.nodeId = nodeId; + this.taskId = taskId; + this.reason = t; + status = ExceptionsHelper.status(t); + } + + public String getNodeId() { + return this.nodeId; + } + + public long getTaskId() { + return this.taskId; + } + + public String getReason() { + return detailedMessage(reason); + } + + public RestStatus getStatus() { + return status; + } + + public Throwable getCause() { + return reason; + } + + @Override + public TaskOperationFailure readFrom(StreamInput in) throws IOException { + return new TaskOperationFailure(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(nodeId); + out.writeLong(taskId); + out.writeThrowable(reason); + RestStatus.writeTo(out, status); + } + + @Override + public String toString() { + return "[" + nodeId + "][" + taskId + "] failed, reason [" + getReason() + "]"; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("task_id", getTaskId()); + builder.field("node_id", getNodeId()); + builder.field("status", status.name()); + if (reason != null) { + builder.field("reason"); + builder.startObject(); + ElasticsearchException.toXContent(builder, params, reason); + builder.endObject(); + } + return builder; + + } + +} diff --git a/core/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java b/core/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java index 2c66a5ef762..425d599ba8f 100644 --- a/core/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java +++ b/core/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java @@ -23,7 +23,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; /** * A generic proxy that will execute the given action against a specific node. 
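As context for TaskOperationFailure above: it renders itself through toXContent, writing fields only, so a caller supplies the enclosing object. A minimal sketch of producing its JSON (illustrative only; the demo class, sample node id, task id, and exception message are assumptions, not part of this change):

---------------------------------------------------------------------------
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.io.IOException;

// Illustrative sketch: render a TaskOperationFailure as JSON. The output
// contains task_id, node_id, status and, when a cause is present, a nested
// "reason" object produced by ElasticsearchException.toXContent.
final class TaskFailureXContentDemo {
    public static void main(String[] args) throws IOException {
        TaskOperationFailure failure =
                new TaskOperationFailure("node-1", 42L, new ElasticsearchException("boom"));
        XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
        builder.startObject();                              // caller-provided wrapper
        failure.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
        System.out.println(builder.string());
    }
}
---------------------------------------------------------------------------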
diff --git a/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java b/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java index ff31bb715db..35340213145 100644 --- a/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java +++ b/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java @@ -21,7 +21,6 @@ package org.elasticsearch.action; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; @@ -52,4 +51,4 @@ public class UnavailableShardsException extends ElasticsearchException { public RestStatus status() { return RestStatus.SERVICE_UNAVAILABLE; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java index 27add930ea8..ccae17b1eeb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java @@ -22,9 +22,9 @@ package org.elasticsearch.action.admin.cluster.health; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterIndexHealth; +import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index f1cc59ba760..79adbafa9bb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -23,7 +23,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -34,6 +38,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -71,7 +76,13 @@ public class 
TransportClusterHealthAction extends TransportMasterNodeReadAction< } @Override - protected void masterOperation(final ClusterHealthRequest request, final ClusterState unusedState, final ActionListener listener) { + protected final void masterOperation(ClusterHealthRequest request, ClusterState state, ActionListener listener) throws Exception { + logger.warn("attempt to execute a cluster health operation without a task"); + throw new UnsupportedOperationException("task parameter is required for this operation"); + } + + @Override + protected void masterOperation(Task task, final ClusterHealthRequest request, final ClusterState unusedState, final ActionListener listener) { if (request.waitForEvents() != null) { final long endTimeMS = TimeValue.nsecToMSec(System.nanoTime()) + request.timeout().millis(); clusterService.submitStateUpdateTask("cluster_health (wait_for_events [" + request.waitForEvents() + "])", new ClusterStateUpdateTask(request.waitForEvents()) { @@ -91,7 +102,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< @Override public void onNoLongerMaster(String source) { logger.trace("stopped being master while waiting for events with priority [{}]. retrying.", request.waitForEvents()); - doExecute(request, listener); + doExecute(task, request, listener); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java index 069e4197e8a..60001f50b62 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java @@ -19,9 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; -import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.unit.TimeValue; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java index 329be6c7614..d262888e964 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.cluster.node.info; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java index 5da4f8897cd..65913bc4b28 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java @@ -23,7 +23,9 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; 
import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportService; public final class TransportLivenessAction implements TransportRequestHandler { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index 1b4b7b06790..a4cf2b1de2a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.cluster.node.stats; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; @@ -340,4 +339,4 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { return builder; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/rest/RestModule.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java similarity index 51% rename from core/src/main/java/org/elasticsearch/rest/RestModule.java rename to core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java index e7949172d0a..acc11861108 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestModule.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java @@ -17,35 +17,30 @@ * under the License. 
 */
 
-package org.elasticsearch.rest;
+package org.elasticsearch.action.admin.cluster.node.tasks.list;
 
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.rest.action.RestActionModule;
-
-import java.util.ArrayList;
-import java.util.List;
+import org.elasticsearch.action.Action;
+import org.elasticsearch.client.ElasticsearchClient;
 
 /**
- *
+ * Action for retrieving a list of currently running tasks
  */
-public class RestModule extends AbstractModule {
+public class ListTasksAction extends Action<ListTasksRequest, ListTasksResponse, ListTasksRequestBuilder> {
 
-    private final Settings settings;
-    private List<Class<? extends BaseRestHandler>> restPluginsActions = new ArrayList<>();
+    public static final ListTasksAction INSTANCE = new ListTasksAction();
+    public static final String NAME = "cluster:monitor/tasks/lists";
 
-    public void addRestAction(Class<? extends BaseRestHandler> restAction) {
-        restPluginsActions.add(restAction);
+    private ListTasksAction() {
+        super(NAME);
     }
 
-    public RestModule(Settings settings) {
-        this.settings = settings;
-    }
-
-    @Override
-    protected void configure() {
-        bind(RestController.class).asEagerSingleton();
-        new RestActionModule(restPluginsActions).configure(binder());
+    public ListTasksResponse newResponse() {
+        return new ListTasksResponse();
+    }
+
+    @Override
+    public ListTasksRequestBuilder newRequestBuilder(ElasticsearchClient client) {
+        return new ListTasksRequestBuilder(client, this);
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java
new file mode 100644
index 00000000000..0b0637e0b8e
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.node.tasks.list;
+
+import org.elasticsearch.action.support.tasks.BaseTasksRequest;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+
+/**
+ * A request to get node tasks
+ */
+public class ListTasksRequest extends BaseTasksRequest<ListTasksRequest> {
+
+    private boolean detailed = false;
+
+    /**
+     * Get information from nodes based on the nodes ids specified. If none are passed, information
+     * for all nodes will be returned.
+     */
+    public ListTasksRequest(String... nodesIds) {
+        super(nodesIds);
+    }
+
+    /**
+     * Should the detailed task information be returned.
+     */
+    public boolean detailed() {
+        return this.detailed;
+    }
+
+    /**
+     * Should the detailed task information be returned.
+     */
+    public ListTasksRequest detailed(boolean detailed) {
+        this.detailed = detailed;
+        return this;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        detailed = in.readBoolean();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeBoolean(detailed);
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java
new file mode 100644
index 00000000000..2b462014f43
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.node.tasks.list;
+
+import org.elasticsearch.action.support.tasks.TasksRequestBuilder;
+import org.elasticsearch.client.ElasticsearchClient;
+
+/**
+ * Builder for the request to retrieve the list of tasks running on the specified nodes
+ */
+public class ListTasksRequestBuilder extends TasksRequestBuilder<ListTasksRequest, ListTasksResponse, ListTasksRequestBuilder> {
+
+    public ListTasksRequestBuilder(ElasticsearchClient client, ListTasksAction action) {
+        super(client, action, new ListTasksRequest());
+    }
+
+    /**
+     * Should detailed task information be returned.
+     */
+    public ListTasksRequestBuilder setDetailed(boolean detailed) {
+        request.detailed(detailed);
+        return this;
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java
new file mode 100644
index 00000000000..2da9701fcfa
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java
@@ -0,0 +1,159 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.node.tasks.list;
+
+import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.support.tasks.BaseTasksResponse;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Returns the list of tasks currently running on the nodes
+ */
+public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
+
+    private List<TaskInfo> tasks;
+
+    private Map<DiscoveryNode, List<TaskInfo>> nodes;
+
+    public ListTasksResponse() {
+    }
+
+    public ListTasksResponse(List<TaskInfo> tasks, List<TaskOperationFailure> taskFailures, List<FailedNodeException> nodeFailures) {
+        super(taskFailures, nodeFailures);
+        this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks));
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        tasks = Collections.unmodifiableList(in.readList(TaskInfo::new));
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeList(tasks);
+    }
+
+    /**
+     * Returns the list of tasks by node
+     */
+    public Map<DiscoveryNode, List<TaskInfo>> getPerNodeTasks() {
+        if (nodes != null) {
+            return nodes;
+        }
+        Map<DiscoveryNode, List<TaskInfo>> nodeTasks = new HashMap<>();
+
+        Set<DiscoveryNode> nodes = new HashSet<>();
+        for (TaskInfo shard : tasks) {
+            nodes.add(shard.getNode());
+        }
+
+        for (DiscoveryNode node : nodes) {
+            List<TaskInfo> tasks = new ArrayList<>();
+            for (TaskInfo taskInfo : this.tasks) {
+                if (taskInfo.getNode().equals(node)) {
+                    tasks.add(taskInfo);
+                }
+            }
+            nodeTasks.put(node, tasks);
+        }
+        this.nodes = nodeTasks;
+        return nodeTasks;
+    }
+
+    public List<TaskInfo> getTasks() {
+        return tasks;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        if (getTaskFailures() != null && getTaskFailures().size() > 0) {
+            builder.startArray("task_failures");
+            for (TaskOperationFailure ex : getTaskFailures()) {
+                builder.value(ex);
+            }
+            builder.endArray();
+        }
+
+        if (getNodeFailures() != null && getNodeFailures().size() > 0) {
+            builder.startArray("node_failures");
+            for (FailedNodeException ex : getNodeFailures()) {
+                builder.value(ex);
+            }
+            builder.endArray();
+        }
+
+        builder.startObject("nodes");
+        for (Map.Entry<DiscoveryNode, List<TaskInfo>> entry : getPerNodeTasks().entrySet()) {
+            DiscoveryNode node = entry.getKey();
+            builder.startObject(node.getId(), XContentBuilder.FieldCaseConversion.NONE);
+            builder.field("name", node.name());
+            builder.field("transport_address", node.address().toString());
+            builder.field("host", node.getHostName());
+            builder.field("ip", node.getAddress());
+
+            if (!node.attributes().isEmpty()) {
+                builder.startObject("attributes");
+                for (ObjectObjectCursor<String, String> attr : node.attributes()) {
+                    builder.field(attr.key, attr.value, XContentBuilder.FieldCaseConversion.NONE);
+                }
+                builder.endObject();
+            }
+            builder.startArray("tasks");
+            for (TaskInfo task : entry.getValue()) {
+                task.toXContent(builder, params);
+            }
+            builder.endArray();
+            builder.endObject();
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public String toString() {
+        try {
+            XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
+            builder.startObject();
+            toXContent(builder, EMPTY_PARAMS);
+            builder.endObject();
+            return builder.string();
+        } catch (IOException e) {
+            return "{ \"error\" : \"" + e.getMessage() + "\"}";
+        }
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java
new file mode 100644
index 00000000000..ed43da2c4ed
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.node.tasks.list;
+
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+/**
+ * Information about a currently running task.
+ * <p>
+ * Tasks are used for communication with transport actions. As a result, they can contain callback
+ * references as well as mutable state. That makes it impractical to send tasks over transport channels
+ * and use in APIs. Instead, immutable and streamable TaskInfo objects are used to represent
+ * snapshot information about currently running tasks.
+ */
+public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
+
+    private final DiscoveryNode node;
+
+    private final long id;
+
+    private final String type;
+
+    private final String action;
+
+    private final String description;
+
+    private final String parentNode;
+
+    private final long parentId;
+
+    public TaskInfo(DiscoveryNode node, long id, String type, String action, String description) {
+        this(node, id, type, action, description, null, -1L);
+    }
+
+    public TaskInfo(DiscoveryNode node, long id, String type, String action, String description, String parentNode, long parentId) {
+        this.node = node;
+        this.id = id;
+        this.type = type;
+        this.action = action;
+        this.description = description;
+        this.parentNode = parentNode;
+        this.parentId = parentId;
+    }
+
+    public TaskInfo(StreamInput in) throws IOException {
+        node = DiscoveryNode.readNode(in);
+        id = in.readLong();
+        type = in.readString();
+        action = in.readString();
+        description = in.readOptionalString();
+        parentNode = in.readOptionalString();
+        parentId = in.readLong();
+    }
+
+    public DiscoveryNode getNode() {
+        return node;
+    }
+
+    public long getId() {
+        return id;
+    }
+
+    public String getType() {
+        return type;
+    }
+
+    public String getAction() {
+        return action;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public String getParentNode() {
+        return parentNode;
+    }
+
+    public long getParentId() {
+        return parentId;
+    }
+
+    @Override
+    public TaskInfo readFrom(StreamInput in) throws IOException {
+        return new TaskInfo(in);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        node.writeTo(out);
+        out.writeLong(id);
+        out.writeString(type);
+        out.writeString(action);
+        out.writeOptionalString(description);
+        out.writeOptionalString(parentNode);
+        out.writeLong(parentId);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("node", node.getId());
+        builder.field("id", id);
+        builder.field("type", type);
+        builder.field("action", action);
+        if (description != null) {
+            builder.field("description", description);
+        }
+        if (parentNode != null) {
+            builder.field("parent_node", parentNode);
+            builder.field("parent_id", parentId);
+        }
+        builder.endObject();
+        return builder;
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java
new file mode 100644
index 00000000000..5475a394f34
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.node.tasks.list;
+
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.tasks.TransportTasksAction;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskManager;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ *
+ */
+public class TransportListTasksAction extends TransportTasksAction<ListTasksRequest, ListTasksResponse, TaskInfo> {
+
+    @Inject
+    public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
+        super(settings, ListTasksAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, ListTasksRequest::new, ListTasksResponse::new, ThreadPool.Names.MANAGEMENT);
+    }
+
+    @Override
+    protected ListTasksResponse newResponse(ListTasksRequest request, List<TaskInfo> tasks, List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException> failedNodeExceptions) {
+        return new ListTasksResponse(tasks, taskOperationFailures, failedNodeExceptions);
+    }
+
+    @Override
+    protected TaskInfo readTaskResponse(StreamInput in) throws IOException {
+        return new TaskInfo(in);
+    }
+
+    @Override
+    protected TaskInfo taskOperation(ListTasksRequest request, Task task) {
+        return task.taskInfo(clusterService.localNode(), request.detailed());
+    }
+
+    @Override
+    protected boolean accumulateExceptions() {
+        return true;
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java
index 152f8230658..451ef21f632 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java
@@ -24,11 +24,12 @@ import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.*;
-import org.elasticsearch.repositories.VerificationFailure;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.common.xcontent.XContentHelper;
import java.io.IOException; -import java.util.Arrays; /** * Unregister repository response diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java index 28f9cb1db90..fbb6a8d18e8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.reroute; -import org.elasticsearch.Version; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java new file mode 100644 index 00000000000..f5020a46b37 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.settings; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.cluster.ClusterState.builder; + +/** + * Updates transient and persistent cluster state settings if there are any changes + * due to the update. 
+ */ +final class SettingsUpdater { + final Settings.Builder transientUpdates = Settings.settingsBuilder(); + final Settings.Builder persistentUpdates = Settings.settingsBuilder(); + private final ClusterSettings clusterSettings; + + SettingsUpdater(ClusterSettings clusterSettings) { + this.clusterSettings = clusterSettings; + } + + synchronized Settings getTransientUpdates() { + return transientUpdates.build(); + } + + synchronized Settings getPersistentUpdate() { + return persistentUpdates.build(); + } + + synchronized ClusterState updateSettings(final ClusterState currentState, Settings transientToApply, Settings persistentToApply) { + boolean changed = false; + Settings.Builder transientSettings = Settings.settingsBuilder(); + transientSettings.put(currentState.metaData().transientSettings()); + changed |= apply(transientToApply, transientSettings, transientUpdates, "transient"); + + Settings.Builder persistentSettings = Settings.settingsBuilder(); + persistentSettings.put(currentState.metaData().persistentSettings()); + changed |= apply(persistentToApply, persistentSettings, persistentUpdates, "persistent"); + + if (!changed) { + return currentState; + } + + MetaData.Builder metaData = MetaData.builder(currentState.metaData()) + .persistentSettings(persistentSettings.build()) + .transientSettings(transientSettings.build()); + + ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings()) || MetaData.SETTING_READ_ONLY_SETTING.get(metaData.transientSettings()); + if (updatedReadOnly) { + blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); + } else { + blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); + } + ClusterState build = builder(currentState).metaData(metaData).blocks(blocks).build(); + Settings settings = build.metaData().settings(); + // now we try to apply things and if they are invalid we fail + // this dryRun will validate & parse settings but won't actually apply them. 
+ clusterSettings.dryRun(settings); + return build; + } + + private boolean apply(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) { + boolean changed = false; + final Set toRemove = new HashSet<>(); + Settings.Builder settingsBuilder = Settings.settingsBuilder(); + for (Map.Entry entry : toApply.getAsMap().entrySet()) { + if (entry.getValue() == null) { + toRemove.add(entry.getKey()); + } else if (clusterSettings.isLoggerSetting(entry.getKey()) || clusterSettings.hasDynamicSetting(entry.getKey())) { + settingsBuilder.put(entry.getKey(), entry.getValue()); + updates.put(entry.getKey(), entry.getValue()); + changed = true; + } else { + throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); + } + + } + changed |= applyDeletes(toRemove, target); + target.put(settingsBuilder.build()); + return changed; + } + + private final boolean applyDeletes(Set deletes, Settings.Builder builder) { + boolean changed = false; + for (String entry : deletes) { + Set keysToRemove = new HashSet<>(); + Set keySet = builder.internalMap().keySet(); + for (String key : keySet) { + if (Regex.simpleMatch(entry, key)) { + keysToRemove.add(key); + } + } + for (String key : keysToRemove) { + builder.remove(key); + changed = true; + } + } + return changed; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index 73d14a2bb11..75f94921e61 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -28,25 +28,19 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; -import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.Map; - -import static org.elasticsearch.cluster.ClusterState.builder; - /** * */ @@ -54,15 +48,14 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct private final AllocationService allocationService; - private final DynamicSettings dynamicSettings; + private final ClusterSettings clusterSettings; @Inject public TransportClusterUpdateSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, - AllocationService allocationService, @ClusterDynamicSettings DynamicSettings dynamicSettings, - ActionFilters actionFilters, 
IndexNameExpressionResolver indexNameExpressionResolver) { + AllocationService allocationService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterSettings clusterSettings) { super(settings, ClusterUpdateSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, ClusterUpdateSettingsRequest::new); this.allocationService = allocationService; - this.dynamicSettings = dynamicSettings; + this.clusterSettings = clusterSettings; } @Override @@ -73,8 +66,8 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override protected ClusterBlockException checkBlock(ClusterUpdateSettingsRequest request, ClusterState state) { // allow for dedicated changes to the metadata blocks, so we don't block those to allow to "re-enable" it - if ((request.transientSettings().getAsMap().isEmpty() && request.persistentSettings().getAsMap().size() == 1 && request.persistentSettings().get(MetaData.SETTING_READ_ONLY) != null) || - request.persistentSettings().getAsMap().isEmpty() && request.transientSettings().getAsMap().size() == 1 && request.transientSettings().get(MetaData.SETTING_READ_ONLY) != null) { + if ((request.transientSettings().getAsMap().isEmpty() && request.persistentSettings().getAsMap().size() == 1 && MetaData.SETTING_READ_ONLY_SETTING.exists(request.persistentSettings())) || + request.persistentSettings().getAsMap().isEmpty() && request.transientSettings().getAsMap().size() == 1 && MetaData.SETTING_READ_ONLY_SETTING.exists(request.transientSettings())) { return null; } return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); @@ -88,9 +81,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override protected void masterOperation(final ClusterUpdateSettingsRequest request, final ClusterState state, final ActionListener listener) { - final Settings.Builder transientUpdates = Settings.settingsBuilder(); - final Settings.Builder persistentUpdates = Settings.settingsBuilder(); - + final SettingsUpdater updater = new SettingsUpdater(clusterSettings); clusterService.submitStateUpdateTask("cluster_update_settings", new AckedClusterStateUpdateTask(Priority.IMMEDIATE, request, listener) { @@ -98,7 +89,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct @Override protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) { - return new ClusterUpdateSettingsResponse(acknowledged, transientUpdates.build(), persistentUpdates.build()); + return new ClusterUpdateSettingsResponse(acknowledged, updater.getTransientUpdates(), updater.getPersistentUpdate()); } @Override @@ -125,7 +116,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct // so we should *not* execute the reroute. 
                        if (!clusterService.state().nodes().localNodeMaster()) {
                            logger.debug("Skipping reroute after cluster update settings, because node is no longer master");
-                           listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
+                           listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, updater.getTransientUpdates(), updater.getPersistentUpdate()));
                            return;
                        }
@@ -145,13 +136,13 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
                    @Override
                    //we return when the cluster reroute is acked or it times out but the acknowledged flag depends on whether the update settings request was acknowledged
                    protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
-                       return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, transientUpdates.build(), persistentUpdates.build());
+                       return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, updater.getTransientUpdates(), updater.getPersistentUpdate());
                    }

                    @Override
                    public void onNoLongerMaster(String source) {
                        logger.debug("failed to perform reroute after cluster settings were updated - current node is no longer a master");
-                       listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
+                       listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, updater.getTransientUpdates(), updater.getPersistentUpdate()));
                    }

                    @Override
@@ -181,58 +172,11 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
            @Override
            public ClusterState execute(final ClusterState currentState) {
-               Settings.Builder transientSettings = Settings.settingsBuilder();
-               transientSettings.put(currentState.metaData().transientSettings());
-               for (Map.Entry<String, String> entry : request.transientSettings().getAsMap().entrySet()) {
-                   if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
-                       String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state());
-                       if (error == null) {
-                           transientSettings.put(entry.getKey(), entry.getValue());
-                           transientUpdates.put(entry.getKey(), entry.getValue());
-                           changed = true;
-                       } else {
-                           logger.warn("ignoring transient setting [{}], [{}]", entry.getKey(), error);
-                       }
-                   } else {
-                       logger.warn("ignoring transient setting [{}], not dynamically updateable", entry.getKey());
-                   }
-               }
-
-               Settings.Builder persistentSettings = Settings.settingsBuilder();
-               persistentSettings.put(currentState.metaData().persistentSettings());
-               for (Map.Entry<String, String> entry : request.persistentSettings().getAsMap().entrySet()) {
-                   if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
-                       String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state());
-                       if (error == null) {
-                           persistentSettings.put(entry.getKey(), entry.getValue());
-                           persistentUpdates.put(entry.getKey(), entry.getValue());
-                           changed = true;
-                       } else {
-                           logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error);
-                       }
-                   } else {
-                       logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey());
-                   }
-               }
-
-               if (!changed) {
-                   return currentState;
-               }
-
-               MetaData.Builder metaData = MetaData.builder(currentState.metaData())
-                       .persistentSettings(persistentSettings.build())
-                       .transientSettings(transientSettings.build());
-
-               ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
-               boolean updatedReadOnly =
metaData.persistentSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || metaData.transientSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false); - if (updatedReadOnly) { - blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); - } else { - blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); - } - - return builder(currentState).metaData(metaData).blocks(blocks).build(); + ClusterState clusterState = updater.updateSettings(currentState, request.transientSettings(), request.persistentSettings()); + changed = clusterState != currentState; + return clusterState; } }); } + } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java index ac01a40748b..a4321e56e43 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Map; /** */ diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java index 722eab9d709..2a3f8a0889a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 70f49750f19..b5bb259409a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index a3708f62bfd..72b686ccfde 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.cluster.state; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java index 78c217d3621..3a6168315e0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java @@ -19,11 +19,11 @@ package org.elasticsearch.action.admin.cluster.stats; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -107,4 +107,4 @@ public class ClusterStatsNodeResponse extends BaseNodeResponse { ss.writeTo(out); } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index d8f2a5bbd20..5604616ed39 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.cursors.ObjectIntCursor; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; @@ -38,6 +37,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.plugins.PluginInfo; import java.io.IOException; import java.net.InetAddress; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java index 5deb1fd7378..4eac6650a5e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.stats; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -168,4 +168,4 @@ public class ClusterStatsResponse extends BaseNodesResponse { private final ScriptService scriptService; @@ -55,7 +57,7 @@ public 
class TransportRenderSearchTemplateAction extends HandledTransportAction< @Override protected void doRun() throws Exception { - ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request); + ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request, Collections.emptyMap()); BytesReference processedTemplate = (BytesReference) executable.run(); RenderSearchTemplateResponse response = new RenderSearchTemplateResponse(); response.source(processedTemplate); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index ecdf977b923..0541ac31505 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -42,7 +42,13 @@ import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.analysis.*; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.shard.ShardId; @@ -53,7 +59,13 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.io.Reader; -import java.util.*; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; /** * Transport action used to execute analyze requests @@ -114,13 +126,13 @@ public class TransportAnalyzeAction extends TransportSingleShardAction implements NodeSettingsService.Listener { +public class TransportCloseIndexAction extends TransportMasterNodeAction { private final MetaDataIndexStateService indexStateService; private final DestructiveOperations destructiveOperations; private volatile boolean closeIndexEnabled; - public static final String SETTING_CLUSTER_INDICES_CLOSE_ENABLE = "cluster.indices.close.enable"; + public static final Setting CLUSTER_INDICES_CLOSE_ENABLE_SETTING = Setting.boolSetting("cluster.indices.close.enable", true, true, Setting.Scope.CLUSTER); @Inject public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, MetaDataIndexStateService indexStateService, - NodeSettingsService nodeSettingsService, ActionFilters actionFilters, + ClusterSettings clusterSettings, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, DestructiveOperations destructiveOperations) { super(settings, CloseIndexAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, CloseIndexRequest::new); this.indexStateService = indexStateService; this.destructiveOperations = destructiveOperations; - this.closeIndexEnabled 
= settings.getAsBoolean(SETTING_CLUSTER_INDICES_CLOSE_ENABLE, true); - nodeSettingsService.addListener(this); + this.closeIndexEnabled = CLUSTER_INDICES_CLOSE_ENABLE_SETTING.get(settings); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_INDICES_CLOSE_ENABLE_SETTING, this::setCloseIndexEnabled); + } + + private void setCloseIndexEnabled(boolean closeIndexEnabled) { + this.closeIndexEnabled = closeIndexEnabled; } @Override @@ -70,12 +76,12 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction listener) { + protected void doExecute(Task task, CloseIndexRequest request, ActionListener listener) { destructiveOperations.failDestructive(request.indices()); if (closeIndexEnabled == false) { - throw new IllegalStateException("closing indices is disabled - set [" + SETTING_CLUSTER_INDICES_CLOSE_ENABLE + ": true] to enable it. NOTE: closed indices still consume a significant amount of diskspace"); + throw new IllegalStateException("closing indices is disabled - set [" + CLUSTER_INDICES_CLOSE_ENABLE_SETTING.getKey() + ": true] to enable it. NOTE: closed indices still consume a significant amount of diskspace"); } - super.doExecute(request, listener); + super.doExecute(task, request, listener); } @Override @@ -104,13 +110,4 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction listener) { + protected void doExecute(Task task, DeleteIndexRequest request, ActionListener listener) { destructiveOperations.failDestructive(request.indices()); - super.doExecute(request, listener); + super.doExecute(task, request, listener); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java new file mode 100644 index 00000000000..291fd49c63a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.flush; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + + +public class SyncedFlushAction extends Action { + + public static final SyncedFlushAction INSTANCE = new SyncedFlushAction(); + public static final String NAME = "indices:admin/synced_flush"; + + private SyncedFlushAction() { + super(NAME); + } + + @Override + public SyncedFlushResponse newResponse() { + return new SyncedFlushResponse(); + } + + @Override + public SyncedFlushRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new SyncedFlushRequestBuilder(client, this); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java new file mode 100644 index 00000000000..59719fe8877 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.flush; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; + +import java.util.Arrays; + +/** + * A synced flush request to sync flush one or more indices. The synced flush process of an index performs a flush + * and writes the same sync id to primary and all copies. + * + *
+ * <p>Best created with {@link org.elasticsearch.client.Requests#syncedFlushRequest(String...)}.</p>
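+ *
+ * <p>A minimal usage sketch built only from the entry points referenced in this javadoc (the
+ * index name is hypothetical; see the flush/syncedFlush links below):</p>
+ * <pre>
+ * SyncedFlushResponse response = client.admin().indices()
+ *         .syncedFlush(Requests.syncedFlushRequest("index1")).actionGet();
+ * </pre>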
+ * + * @see org.elasticsearch.client.Requests#flushRequest(String...) + * @see org.elasticsearch.client.IndicesAdminClient#syncedFlush(SyncedFlushRequest) + * @see SyncedFlushResponse + */ +public class SyncedFlushRequest extends BroadcastRequest { + + public SyncedFlushRequest() { + } + + /** + * Copy constructor that creates a new synced flush request that is a copy of the one provided as an argument. + * The new request will inherit though headers and context from the original request that caused it. + */ + public SyncedFlushRequest(ActionRequest originalRequest) { + super(originalRequest); + } + + /** + * Constructs a new synced flush request against one or more indices. If nothing is provided, all indices will + * be sync flushed. + */ + public SyncedFlushRequest(String... indices) { + super(indices); + } + + + @Override + public String toString() { + return "SyncedFlushRequest{" + + "indices=" + Arrays.toString(indices) + "}"; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequestBuilder.java new file mode 100644 index 00000000000..9e407260811 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequestBuilder.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.flush; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.ElasticsearchClient; + +public class SyncedFlushRequestBuilder extends ActionRequestBuilder { + + public SyncedFlushRequestBuilder(ElasticsearchClient client, SyncedFlushAction action) { + super(client, action, new SyncedFlushRequest()); + } + + public SyncedFlushRequestBuilder setIndices(String[] indices) { + super.request().indices(indices); + return this; + } + + public SyncedFlushRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) { + super.request().indicesOptions(indicesOptions); + return this; + } +} diff --git a/core/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushResponse.java similarity index 64% rename from core/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java rename to core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushResponse.java index 435c0d138cd..5925370e5f7 100644 --- a/core/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushResponse.java @@ -16,16 +16,25 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.indices.flush; +package org.elasticsearch.action.admin.indices.flush; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.indices.flush.ShardsSyncedFlushResult; +import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -34,13 +43,16 @@ import static java.util.Collections.unmodifiableMap; /** * The result of performing a sync flush operation on all shards of multiple indices */ -public class IndicesSyncedFlushResult implements ToXContent { +public class SyncedFlushResponse extends ActionResponse implements ToXContent { - final Map> shardsResultPerIndex; - final ShardCounts shardCounts; + Map> shardsResultPerIndex; + ShardCounts shardCounts; + SyncedFlushResponse() { - public IndicesSyncedFlushResult(Map> shardsResultPerIndex) { + } + + public SyncedFlushResponse(Map> shardsResultPerIndex) { // shardsResultPerIndex is never modified after it is passed to this // constructor so this is safe even though shardsResultPerIndex is a // ConcurrentHashMap @@ -48,17 +60,23 @@ public class IndicesSyncedFlushResult implements ToXContent { this.shardCounts = calculateShardCounts(Iterables.flatten(shardsResultPerIndex.values())); } - /** total number shards, including replicas, both assigned and unassigned */ + /** + * total number shards, including replicas, both assigned and unassigned + */ public int totalShards() { return shardCounts.total; } - /** total number of shards for 
which the operation failed */ + /** + * total number of shards for which the operation failed + */ public int failedShards() { return shardCounts.failed; } - /** total number of shards which were successfully sync-flushed */ + /** + * total number of shards which were successfully sync-flushed + */ public int successfulShards() { return shardCounts.successful; } @@ -91,8 +109,8 @@ public class IndicesSyncedFlushResult implements ToXContent { builder.endObject(); continue; } - Map failedShards = shardResults.failedShards(); - for (Map.Entry shardEntry : failedShards.entrySet()) { + Map failedShards = shardResults.failedShards(); + for (Map.Entry shardEntry : failedShards.entrySet()) { builder.startObject(); builder.field(Fields.SHARD, shardResults.shardId().id()); builder.field(Fields.REASON, shardEntry.getValue().failureReason()); @@ -123,11 +141,11 @@ public class IndicesSyncedFlushResult implements ToXContent { return new ShardCounts(total, successful, failed); } - static final class ShardCounts implements ToXContent { + static final class ShardCounts implements ToXContent, Streamable { - public final int total; - public final int successful; - public final int failed; + public int total; + public int successful; + public int failed; ShardCounts(int total, int successful, int failed) { this.total = total; @@ -135,6 +153,10 @@ public class IndicesSyncedFlushResult implements ToXContent { this.failed = failed; } + ShardCounts() { + + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(Fields.TOTAL, total); @@ -142,6 +164,20 @@ public class IndicesSyncedFlushResult implements ToXContent { builder.field(Fields.FAILED, failed); return builder; } + + @Override + public void readFrom(StreamInput in) throws IOException { + total = in.readInt(); + successful = in.readInt(); + failed = in.readInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInt(total); + out.writeInt(successful); + out.writeInt(failed); + } } static final class Fields { @@ -154,4 +190,37 @@ public class IndicesSyncedFlushResult implements ToXContent { static final XContentBuilderString ROUTING = new XContentBuilderString("routing"); static final XContentBuilderString REASON = new XContentBuilderString("reason"); } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + shardCounts = new ShardCounts(); + shardCounts.readFrom(in); + Map> tmpShardsResultPerIndex = new HashMap<>(); + int numShardsResults = in.readInt(); + for (int i =0 ; i< numShardsResults; i++) { + String index = in.readString(); + List shardsSyncedFlushResults = new ArrayList<>(); + int numShards = in.readInt(); + for (int j =0; j< numShards; j++) { + shardsSyncedFlushResults.add(ShardsSyncedFlushResult.readShardsSyncedFlushResult(in)); + } + tmpShardsResultPerIndex.put(index, shardsSyncedFlushResults); + } + shardsResultPerIndex = Collections.unmodifiableMap(tmpShardsResultPerIndex); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + shardCounts.writeTo(out); + out.writeInt(shardsResultPerIndex.size()); + for (Map.Entry> entry : shardsResultPerIndex.entrySet()) { + out.writeString(entry.getKey()); + out.writeInt(entry.getValue().size()); + for (ShardsSyncedFlushResult shardsSyncedFlushResult : entry.getValue()) { + shardsSyncedFlushResult.writeTo(out); + } + } + } } diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java new file mode 100644 index 00000000000..3ba354f4629 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.flush; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.flush.SyncedFlushService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +/** + * Synced flush Action. 
+ */ +public class TransportSyncedFlushAction extends HandledTransportAction { + + SyncedFlushService syncedFlushService; + + @Inject + public TransportSyncedFlushAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + SyncedFlushService syncedFlushService) { + super(settings, SyncedFlushAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, SyncedFlushRequest::new); + this.syncedFlushService = syncedFlushService; + } + + @Override + protected void doExecute(SyncedFlushRequest request, ActionListener listener) { + syncedFlushService.attemptSyncedFlush(request.indices(), request.indicesOptions(), listener); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java index 5b999c0f5b7..3844f00193c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java @@ -21,10 +21,7 @@ package org.elasticsearch.action.admin.indices.forcemerge; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import java.io.IOException; import java.util.List; /** diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java index 6eac403b212..1f06a25acc3 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.indices.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index c71f60ee241..e9682692619 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -171,7 +171,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc for (String field : request.fields()) { if (Regex.isMatchAllPattern(field)) { for (FieldMapper fieldMapper : allFieldMappers) { - addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); + addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); } } else if (Regex.isSimpleMatchPattern(field)) { // go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name. 
@@ -179,15 +179,15 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc Collection remainingFieldMappers = newLinkedList(allFieldMappers); for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.fieldType().names().fullName())) { - addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) { + addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.fieldType().names().indexName())) { - addFieldMapper(fieldMapper.fieldType().names().indexName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) { + addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } @@ -214,7 +214,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc builder.startObject(); fieldMapper.toXContent(builder, includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS); builder.endObject(); - fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().names().fullName(), builder.bytes())); + fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().name(), builder.bytes())); } catch (IOException e) { throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index cab1047cac4..7ffb30b9534 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -32,12 +32,10 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.Arrays; - /** * Open index action */ @@ -49,7 +47,7 @@ public class TransportOpenIndexAction extends TransportMasterNodeAction listener) { + protected void doExecute(Task task, OpenIndexRequest request, ActionListener listener) { destructiveOperations.failDestructive(request.indices()); - super.doExecute(request, listener); + super.doExecute(task, request, listener); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index c78f4d2abc4..2175cde4213 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -35,8 +35,6 @@ 
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.Arrays; - /** * */ diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java index 710be185f15..9b129e0d729 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java @@ -20,9 +20,9 @@ package org.elasticsearch.action.admin.indices.shards; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java index 84b39d4c689..85a1c584b2a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java @@ -40,7 +40,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse.StoreStatus.*; +import static org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse.StoreStatus.readStoreStatus; /** * Response for {@link IndicesShardStoresAction} @@ -56,13 +56,14 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon public static class StoreStatus implements Streamable, ToXContent, Comparable { private DiscoveryNode node; private long version; + private String allocationId; private Throwable storeException; - private Allocation allocation; + private AllocationStatus allocationStatus; /** * The status of the shard store with respect to the cluster */ - public enum Allocation { + public enum AllocationStatus { /** * Allocated as primary @@ -81,16 +82,16 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon private final byte id; - Allocation(byte id) { + AllocationStatus(byte id) { this.id = id; } - private static Allocation fromId(byte id) { + private static AllocationStatus fromId(byte id) { switch (id) { case 0: return PRIMARY; case 1: return REPLICA; case 2: return UNUSED; - default: throw new IllegalArgumentException("unknown id for allocation [" + id + "]"); + default: throw new IllegalArgumentException("unknown id for allocation status [" + id + "]"); } } @@ -99,11 +100,11 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon case 0: return "primary"; case 1: return "replica"; case 2: return "unused"; - default: throw new IllegalArgumentException("unknown id for allocation [" + id + "]"); + default: throw new IllegalArgumentException("unknown id for allocation status [" + id + "]"); } } - private static Allocation 
readFrom(StreamInput in) throws IOException {
+        private static AllocationStatus readFrom(StreamInput in) throws IOException {
             return fromId(in.readByte());
         }
@@ -115,10 +116,11 @@
         private StoreStatus() {
         }

-        public StoreStatus(DiscoveryNode node, long version, Allocation allocation, Throwable storeException) {
+        public StoreStatus(DiscoveryNode node, long version, String allocationId, AllocationStatus allocationStatus, Throwable storeException) {
             this.node = node;
             this.version = version;
-            this.allocation = allocation;
+            this.allocationId = allocationId;
+            this.allocationStatus = allocationStatus;
             this.storeException = storeException;
         }
@@ -130,13 +132,20 @@
         }

         /**
-         * Version of the store, used to select the store that will be
-         * used as a primary.
+         * Version of the store
          */
         public long getVersion() {
             return version;
         }

+        /**
+         * Allocation id of the store, used to select the store that will be
+         * used as a primary.
+         */
+        public String getAllocationId() {
+            return allocationId;
+        }
+
         /**
          * Exception while trying to open the
          * shard index or from when the shard failed
@@ -146,13 +155,13 @@
         }

         /**
-         * The allocation status of the store.
-         * {@link Allocation#PRIMARY} indicates a primary shard copy
-         * {@link Allocation#REPLICA} indicates a replica shard copy
-         * {@link Allocation#UNUSED} indicates an unused shard copy
+         * The allocation status of the store.
+         * {@link AllocationStatus#PRIMARY} indicates a primary shard copy
+         * {@link AllocationStatus#REPLICA} indicates a replica shard copy
+         * {@link AllocationStatus#UNUSED} indicates an unused shard copy
          */
-        public Allocation getAllocation() {
-            return allocation;
+        public AllocationStatus getAllocationStatus() {
+            return allocationStatus;
         }

         static StoreStatus readStoreStatus(StreamInput in) throws IOException {
@@ -165,7 +174,8 @@
         public void readFrom(StreamInput in) throws IOException {
             node = DiscoveryNode.readNode(in);
             version = in.readLong();
-            allocation = Allocation.readFrom(in);
+            allocationId = in.readOptionalString();
+            allocationStatus = AllocationStatus.readFrom(in);
             if (in.readBoolean()) {
                 storeException = in.readThrowable();
             }
@@ -175,7 +185,8 @@
         public void writeTo(StreamOutput out) throws IOException {
             node.writeTo(out);
             out.writeLong(version);
-            allocation.writeTo(out);
+            out.writeOptionalString(allocationId);
+            allocationStatus.writeTo(out);
             if (storeException != null) {
                 out.writeBoolean(true);
                 out.writeThrowable(storeException);
@@ -188,7 +199,8 @@
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             node.toXContent(builder, params);
             builder.field(Fields.VERSION, version);
-            builder.field(Fields.ALLOCATED, allocation.value());
+            builder.field(Fields.ALLOCATION_ID, allocationId);
+            builder.field(Fields.ALLOCATED, allocationStatus.value());
             if (storeException != null) {
                 builder.startObject(Fields.STORE_EXCEPTION);
                 ElasticsearchException.toXContent(builder, params, storeException);
@@ -206,7 +218,7 @@
             } else {
int compare = Long.compare(other.version, version); if (compare == 0) { - return Integer.compare(allocation.id, other.allocation.id); + return Integer.compare(allocationStatus.id, other.allocationStatus.id); } return compare; } @@ -379,6 +391,7 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon static final XContentBuilderString STORES = new XContentBuilderString("stores"); // StoreStatus fields static final XContentBuilderString VERSION = new XContentBuilderString("version"); + static final XContentBuilderString ALLOCATION_ID = new XContentBuilderString("allocation_id"); static final XContentBuilderString STORE_EXCEPTION = new XContentBuilderString("store_exception"); static final XContentBuilderString ALLOCATED = new XContentBuilderString("allocation"); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index 336ebc254b4..77f8608089c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -21,14 +21,14 @@ package org.elasticsearch.action.admin.indices.shards; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterShardHealth; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.health.ClusterShardHealth; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -179,8 +179,8 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc } for (NodeGatewayStartedShards response : fetchResponse.responses) { if (shardExistsInNode(response)) { - IndicesShardStoresResponse.StoreStatus.Allocation allocation = getAllocation(fetchResponse.shardId.getIndex(), fetchResponse.shardId.id(), response.getNode()); - storeStatuses.add(new IndicesShardStoresResponse.StoreStatus(response.getNode(), response.version(), allocation, response.storeException())); + IndicesShardStoresResponse.StoreStatus.AllocationStatus allocationStatus = getAllocationStatus(fetchResponse.shardId.getIndex(), fetchResponse.shardId.id(), response.getNode()); + storeStatuses.add(new IndicesShardStoresResponse.StoreStatus(response.getNode(), response.version(), response.allocationId(), allocationStatus, response.storeException())); } } CollectionUtil.timSort(storeStatuses); @@ -193,27 +193,27 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc listener.onResponse(new IndicesShardStoresResponse(indicesStoreStatusesBuilder.build(), Collections.unmodifiableList(failureBuilder))); } - private IndicesShardStoresResponse.StoreStatus.Allocation getAllocation(String index, int shardID, DiscoveryNode node) { + 
private IndicesShardStoresResponse.StoreStatus.AllocationStatus getAllocationStatus(String index, int shardID, DiscoveryNode node) { for (ShardRouting shardRouting : routingNodes.node(node.id())) { ShardId shardId = shardRouting.shardId(); if (shardId.id() == shardID && shardId.getIndex().equals(index)) { if (shardRouting.primary()) { - return IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY; + return IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY; } else if (shardRouting.assignedToNode()) { - return IndicesShardStoresResponse.StoreStatus.Allocation.REPLICA; + return IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA; } else { - return IndicesShardStoresResponse.StoreStatus.Allocation.UNUSED; + return IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED; } } } - return IndicesShardStoresResponse.StoreStatus.Allocation.UNUSED; + return IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED; } /** * A shard exists/existed in a node only if shard state file exists in the node */ private boolean shardExistsInNode(final NodeGatewayStartedShards response) { - return response.storeException() != null || response.version() != -1; + return response.storeException() != null || response.version() != -1 || response.allocationId() != null; } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 08d6f9e6c49..55ad75eab33 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.engine.CommitStats; import java.io.IOException; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java index 8fea8c795eb..045fcc0c1e2 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.engine.CommitStats; -import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardPath; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/get/GetWarmersResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/get/GetWarmersResponse.java index 57e0b746496..0559e522925 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/get/GetWarmersResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/get/GetWarmersResponse.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.indices.warmer.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.collect.ImmutableOpenMap; import 
org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java index d72be81028d..8dd671b4da0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java @@ -34,11 +34,9 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.warmer.IndexWarmersMetaData; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java b/core/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java new file mode 100644 index 00000000000..a0ccca0fb5c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java @@ -0,0 +1,203 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.common.unit.TimeValue; + +import java.util.Iterator; +import java.util.NoSuchElementException; + +/** + * Provides a backoff policy for bulk requests. Whenever a bulk request is rejected due to resource constraints (i.e. the client's internal + * thread pool is full), the backoff policy decides how long the bulk processor will wait before the operation is retried internally. + * + * Notes for implementing custom subclasses: + * + * The underlying mathematical principle of BackoffPolicy is a progression, which can be either finite or infinite, although + * the latter should not be used for retrying. A progression can be mapped to a java.util.Iterator with the following + * semantics: + * + * - #hasNext() determines whether the progression has more elements. Return true for infinite progressions + * - #next() determines the next element in the progression, i.e. the next wait time period
+ * + * Note that backoff policies are exposed as Iterables in order to be consumed multiple times. + */ +public abstract class BackoffPolicy implements Iterable { + private static final BackoffPolicy NO_BACKOFF = new NoBackoff(); + + /** + * Creates a backoff policy that will not allow any backoff, i.e. an operation will fail after the first attempt. + * + * @return A backoff policy without any backoff period. The returned instance is thread safe. + */ + public static BackoffPolicy noBackoff() { + return NO_BACKOFF; + } + + /** + * Creates a new constant backoff policy with the provided configuration. + * + * @param delay The delay defines how long to wait between retry attempts. Must not be null. + * Must be <= Integer.MAX_VALUE ms. + * @param maxNumberOfRetries The maximum number of retries. Must be a non-negative number. + * @return A backoff policy with a constant wait time between retries. The returned instance is thread safe but each + * iterator created from it should only be used by a single thread. + */ + public static BackoffPolicy constantBackoff(TimeValue delay, int maxNumberOfRetries) { + return new ConstantBackoff(checkDelay(delay), maxNumberOfRetries); + } + + /** + * Creates a new exponential backoff policy with a default configuration of 50 ms initial wait period and 8 retries taking + * roughly 5.1 seconds in total. + * + * @return A backoff policy with an exponential increase in wait time for retries. The returned instance is thread safe but each + * iterator created from it should only be used by a single thread. + */ + public static BackoffPolicy exponentialBackoff() { + return exponentialBackoff(TimeValue.timeValueMillis(50), 8); + } + + /** + * Creates a new exponential backoff policy with the provided configuration. + * + * @param initialDelay The initial delay defines how long to wait for the first retry attempt. Must not be null. + * Must be <= Integer.MAX_VALUE ms. + * @param maxNumberOfRetries The maximum number of retries. Must be a non-negative number. + * @return A backoff policy with an exponential increase in wait time for retries. The returned instance is thread safe but each + * iterator created from it should only be used by a single thread.
+ */ + public static BackoffPolicy exponentialBackoff(TimeValue initialDelay, int maxNumberOfRetries) { + return new ExponentialBackoff((int) checkDelay(initialDelay).millis(), maxNumberOfRetries); + } + + private static TimeValue checkDelay(TimeValue delay) { + if (delay.millis() > Integer.MAX_VALUE) { + throw new IllegalArgumentException("delay must be <= " + Integer.MAX_VALUE + " ms"); + } + return delay; + } + + private static class NoBackoff extends BackoffPolicy { + @Override + public Iterator iterator() { + return new Iterator() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public TimeValue next() { + throw new NoSuchElementException("No backoff"); + } + }; + } + } + + private static class ExponentialBackoff extends BackoffPolicy { + private final int start; + + private final int numberOfElements; + + private ExponentialBackoff(int start, int numberOfElements) { + assert start >= 0; + assert numberOfElements >= 0; + this.start = start; + this.numberOfElements = numberOfElements; + } + + @Override + public Iterator iterator() { + return new ExponentialBackoffIterator(start, numberOfElements); + } + } + + private static class ExponentialBackoffIterator implements Iterator { + private final int numberOfElements; + + private final int start; + + private int currentlyConsumed; + + private ExponentialBackoffIterator(int start, int numberOfElements) { + this.start = start; + this.numberOfElements = numberOfElements; + } + + @Override + public boolean hasNext() { + return currentlyConsumed < numberOfElements; + } + + @Override + public TimeValue next() { + if (!hasNext()) { + throw new NoSuchElementException("Only up to " + numberOfElements + " elements"); + } + int result = start + 10 * ((int) Math.exp(0.8d * (currentlyConsumed)) - 1); + currentlyConsumed++; + return TimeValue.timeValueMillis(result); + } + } + + private static final class ConstantBackoff extends BackoffPolicy { + private final TimeValue delay; + + private final int numberOfElements; + + public ConstantBackoff(TimeValue delay, int numberOfElements) { + assert numberOfElements >= 0; + this.delay = delay; + this.numberOfElements = numberOfElements; + } + + @Override + public Iterator iterator() { + return new ConstantBackoffIterator(delay, numberOfElements); + } + } + + private static final class ConstantBackoffIterator implements Iterator { + private final TimeValue delay; + private final int numberOfElements; + private int curr; + + public ConstantBackoffIterator(TimeValue delay, int numberOfElements) { + this.delay = delay; + this.numberOfElements = numberOfElements; + } + + @Override + public boolean hasNext() { + return curr < numberOfElements; + } + + @Override + public TimeValue next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + curr++; + return delay; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java index 712007c1215..760c5781aea 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.delete.DeleteRequest; diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java 
b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 2a7c185ad8a..78a0c76702f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -33,7 +32,11 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.FutureUtils; import java.io.Closeable; -import java.util.concurrent.*; +import java.util.Objects; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** @@ -48,7 +51,7 @@ public class BulkProcessor implements Closeable { /** * A listener for the execution. */ - public static interface Listener { + public interface Listener { /** * Callback before the bulk is executed. @@ -62,6 +65,9 @@ public class BulkProcessor implements Closeable { /** * Callback after a failed execution of bulk request. + * + * Note that in case an instance of InterruptedException is passed, which means that request processing has been + * cancelled externally, the thread's interruption status has been restored prior to calling this method. */ void afterBulk(long executionId, BulkRequest request, Throwable failure); } @@ -79,6 +85,7 @@ public class BulkProcessor implements Closeable { private int bulkActions = 1000; private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB); private TimeValue flushInterval = null; + private BackoffPolicy backoffPolicy = BackoffPolicy.exponentialBackoff(); /** * Creates a builder of bulk processor with the client to use and the listener that will be used @@ -136,54 +143,58 @@ public class BulkProcessor implements Closeable { return this; } + /** + * Sets a custom backoff policy. The backoff policy defines how the bulk processor should handle retries of bulk requests internally + * in case they have failed due to resource constraints (i.e. a thread pool was full). + * + * The default is to back off exponentially. + * + * @see org.elasticsearch.action.bulk.BackoffPolicy#exponentialBackoff() + */ + public Builder setBackoffPolicy(BackoffPolicy backoffPolicy) { + if (backoffPolicy == null) { + throw new NullPointerException("'backoffPolicy' must not be null. To disable backoff, pass BackoffPolicy.noBackoff()"); + } + this.backoffPolicy = backoffPolicy; + return this; + } + /** * Builds a new bulk processor. 
*/ public BulkProcessor build() { - return new BulkProcessor(client, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval); + return new BulkProcessor(client, backoffPolicy, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval); } } public static Builder builder(Client client, Listener listener) { - if (client == null) { - throw new NullPointerException("The client you specified while building a BulkProcessor is null"); - } - + Objects.requireNonNull(client, "client"); + Objects.requireNonNull(listener, "listener"); + return new Builder(client, listener); } - private final Client client; - private final Listener listener; - - private final String name; - - private final int concurrentRequests; private final int bulkActions; private final long bulkSize; - private final TimeValue flushInterval; - private final Semaphore semaphore; + private final ScheduledThreadPoolExecutor scheduler; private final ScheduledFuture scheduledFuture; private final AtomicLong executionIdGen = new AtomicLong(); private BulkRequest bulkRequest; + private final BulkRequestHandler bulkRequestHandler; private volatile boolean closed = false; - BulkProcessor(Client client, Listener listener, @Nullable String name, int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval) { - this.client = client; - this.listener = listener; - this.name = name; - this.concurrentRequests = concurrentRequests; + BulkProcessor(Client client, BackoffPolicy backoffPolicy, Listener listener, @Nullable String name, int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval) { this.bulkActions = bulkActions; this.bulkSize = bulkSize.bytes(); - this.semaphore = new Semaphore(concurrentRequests); this.bulkRequest = new BulkRequest(); + this.bulkRequestHandler = (concurrentRequests == 0) ? BulkRequestHandler.syncHandler(client, backoffPolicy, listener) : BulkRequestHandler.asyncHandler(client, backoffPolicy, listener, concurrentRequests); - this.flushInterval = flushInterval; if (flushInterval != null) { this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1, EsExecutors.daemonThreadFactory(client.settings(), (name != null ? "[" + name + "]" : "") + "bulk_processor")); this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); @@ -231,14 +242,7 @@ public class BulkProcessor implements Closeable { if (bulkRequest.numberOfActions() > 0) { execute(); } - if (this.concurrentRequests < 1) { - return true; - } - if (semaphore.tryAcquire(this.concurrentRequests, timeout, unit)) { - semaphore.release(this.concurrentRequests); - return true; - } - return false; + return this.bulkRequestHandler.awaitClose(timeout, unit); } /** @@ -308,58 +312,7 @@ public class BulkProcessor implements Closeable { final long executionId = executionIdGen.incrementAndGet(); this.bulkRequest = new BulkRequest(); - - if (concurrentRequests == 0) { - // execute in a blocking fashion... 
- boolean afterCalled = false; - try { - listener.beforeBulk(executionId, bulkRequest); - BulkResponse bulkItemResponses = client.bulk(bulkRequest).actionGet(); - afterCalled = true; - listener.afterBulk(executionId, bulkRequest, bulkItemResponses); - } catch (Exception e) { - if (!afterCalled) { - listener.afterBulk(executionId, bulkRequest, e); - } - } - } else { - boolean success = false; - boolean acquired = false; - try { - listener.beforeBulk(executionId, bulkRequest); - semaphore.acquire(); - acquired = true; - client.bulk(bulkRequest, new ActionListener() { - @Override - public void onResponse(BulkResponse response) { - try { - listener.afterBulk(executionId, bulkRequest, response); - } finally { - semaphore.release(); - } - } - - @Override - public void onFailure(Throwable e) { - try { - listener.afterBulk(executionId, bulkRequest, e); - } finally { - semaphore.release(); - } - } - }); - success = true; - } catch (InterruptedException e) { - Thread.interrupted(); - listener.afterBulk(executionId, bulkRequest, e); - } catch (Throwable t) { - listener.afterBulk(executionId, bulkRequest, t); - } finally { - if (!success && acquired) { // if we fail on client.bulk() release the semaphore - semaphore.release(); - } - } - } + this.bulkRequestHandler.execute(bulkRequest, executionId); } private boolean isOverTheLimit() { diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java new file mode 100644 index 00000000000..dc98a16c578 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -0,0 +1,166 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; + +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +/** + * Abstracts the low-level details of bulk request handling + */ +abstract class BulkRequestHandler { + protected final ESLogger logger; + protected final Client client; + + protected BulkRequestHandler(Client client) { + this.client = client; + this.logger = Loggers.getLogger(getClass(), client.settings()); + } + + + public abstract void execute(BulkRequest bulkRequest, long executionId); + + public abstract boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException; + + + public static BulkRequestHandler syncHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) { + return new SyncBulkRequestHandler(client, backoffPolicy, listener); + } + + public static BulkRequestHandler asyncHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, int concurrentRequests) { + return new AsyncBulkRequestHandler(client, backoffPolicy, listener, concurrentRequests); + } + + private static class SyncBulkRequestHandler extends BulkRequestHandler { + private final BulkProcessor.Listener listener; + private final BackoffPolicy backoffPolicy; + + public SyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) { + super(client); + this.backoffPolicy = backoffPolicy; + this.listener = listener; + } + + @Override + public void execute(BulkRequest bulkRequest, long executionId) { + boolean afterCalled = false; + try { + listener.beforeBulk(executionId, bulkRequest); + BulkResponse bulkResponse = Retry + .on(EsRejectedExecutionException.class) + .policy(backoffPolicy) + .withSyncBackoff(client, bulkRequest); + afterCalled = true; + listener.afterBulk(executionId, bulkRequest, bulkResponse); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.info("Bulk request {} has been cancelled.", e, executionId); + if (!afterCalled) { + listener.afterBulk(executionId, bulkRequest, e); + } + } catch (Throwable t) { + logger.warn("Failed to execute bulk request {}.", t, executionId); + if (!afterCalled) { + listener.afterBulk(executionId, bulkRequest, t); + } + } + } + + @Override + public boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException { + // we are "closed" immediately as there is no request in flight + return true; + } + } + + private static class AsyncBulkRequestHandler extends BulkRequestHandler { + private final BackoffPolicy backoffPolicy; + private final BulkProcessor.Listener listener; + private final Semaphore semaphore; + private final int concurrentRequests; + + private AsyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, int concurrentRequests) { + super(client); + this.backoffPolicy = backoffPolicy; + assert concurrentRequests > 0; + this.listener = listener; + this.concurrentRequests = concurrentRequests; + this.semaphore = new Semaphore(concurrentRequests); + } + + @Override + public void execute(BulkRequest bulkRequest, long executionId) { + boolean bulkRequestSetupSuccessful = false; + boolean acquired = false; + try { + listener.beforeBulk(executionId, bulkRequest); + semaphore.acquire(); + 
acquired = true; + Retry.on(EsRejectedExecutionException.class) + .policy(backoffPolicy) + .withAsyncBackoff(client, bulkRequest, new ActionListener() { + @Override + public void onResponse(BulkResponse response) { + try { + listener.afterBulk(executionId, bulkRequest, response); + } finally { + semaphore.release(); + } + } + + @Override + public void onFailure(Throwable e) { + try { + listener.afterBulk(executionId, bulkRequest, e); + } finally { + semaphore.release(); + } + } + }); + bulkRequestSetupSuccessful = true; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.info("Bulk request {} has been cancelled.", e, executionId); + listener.afterBulk(executionId, bulkRequest, e); + } catch (Throwable t) { + logger.warn("Failed to execute bulk request {}.", t, executionId); + listener.afterBulk(executionId, bulkRequest, t); + } finally { + if (!bulkRequestSetupSuccessful && acquired) { // if we fail on client.bulk() release the semaphore + semaphore.release(); + } + } + } + + @Override + public boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException { + if (semaphore.tryAcquire(this.concurrentRequests, timeout, unit)) { + semaphore.release(this.concurrentRequests); + return true; + } + return false; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/bulk/Retry.java b/core/src/main/java/org/elasticsearch/action/bulk/Retry.java new file mode 100644 index 00000000000..72e0da71921 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/bulk/Retry.java @@ -0,0 +1,237 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.ScheduledFuture; +import java.util.function.Predicate; + +/** + * Encapsulates synchronous and asynchronous retry logic. + */ +class Retry { + private final Class retryOnThrowable; + + private BackoffPolicy backoffPolicy; + + public static Retry on(Class retryOnThrowable) { + return new Retry(retryOnThrowable); + } + + /** + * @param backoffPolicy The backoff policy that defines how long and how often to wait for retries. 
+ */ + public Retry policy(BackoffPolicy backoffPolicy) { + this.backoffPolicy = backoffPolicy; + return this; + } + + Retry(Class retryOnThrowable) { + this.retryOnThrowable = retryOnThrowable; + } + + /** + * Invokes #bulk(BulkRequest, ActionListener) on the provided client. Backs off on the provided exception and delegates results to the + * provided listener. + * + * @param client Client invoking the bulk request. + * @param bulkRequest The bulk request that should be executed. + * @param listener A listener that is invoked when the bulk request finishes or completes with an exception. The listener is not + */ + public void withAsyncBackoff(Client client, BulkRequest bulkRequest, ActionListener listener) { + AsyncRetryHandler r = new AsyncRetryHandler(retryOnThrowable, backoffPolicy, client, listener); + r.execute(bulkRequest); + + } + + /** + * Invokes #bulk(BulkRequest) on the provided client. Backs off on the provided exception. + * + * @param client Client invoking the bulk request. + * @param bulkRequest The bulk request that should be executed. + * @return the bulk response as returned by the client. + * @throws Exception Any exception thrown by the callable. + */ + public BulkResponse withSyncBackoff(Client client, BulkRequest bulkRequest) throws Exception { + return SyncRetryHandler + .create(retryOnThrowable, backoffPolicy, client) + .executeBlocking(bulkRequest) + .actionGet(); + } + + static class AbstractRetryHandler implements ActionListener { + private final ESLogger logger; + private final Client client; + private final ActionListener listener; + private final Iterator backoff; + private final Class retryOnThrowable; + // Access only when holding a client-side lock, see also #addResponses() + private final List responses = new ArrayList<>(); + private final long startTimestampNanos; + // needed to construct the next bulk request based on the response to the previous one + // volatile as we're called from a scheduled thread + private volatile BulkRequest currentBulkRequest; + private volatile ScheduledFuture scheduledRequestFuture; + + public AbstractRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener listener) { + this.retryOnThrowable = retryOnThrowable; + this.backoff = backoffPolicy.iterator(); + this.client = client; + this.listener = listener; + this.logger = Loggers.getLogger(getClass(), client.settings()); + // in contrast to System.currentTimeMillis(), nanoTime() uses a monotonic clock under the hood + this.startTimestampNanos = System.nanoTime(); + } + + @Override + public void onResponse(BulkResponse bulkItemResponses) { + if (!bulkItemResponses.hasFailures()) { + // we're done here, include all responses + addResponses(bulkItemResponses, (r -> true)); + finishHim(); + } else { + if (canRetry(bulkItemResponses)) { + addResponses(bulkItemResponses, (r -> !r.isFailed())); + retry(createBulkRequestForRetry(bulkItemResponses)); + } else { + addResponses(bulkItemResponses, (r -> true)); + finishHim(); + } + } + } + + @Override + public void onFailure(Throwable e) { + try { + listener.onFailure(e); + } finally { + FutureUtils.cancel(scheduledRequestFuture); + } + } + + private void retry(BulkRequest bulkRequestForRetry) { + assert backoff.hasNext(); + TimeValue next = backoff.next(); + logger.trace("Retry of bulk request scheduled in {} ms.", next.millis()); + scheduledRequestFuture = client.threadPool().schedule(next, ThreadPool.Names.SAME, (() -> this.execute(bulkRequestForRetry))); + } + + private BulkRequest 
createBulkRequestForRetry(BulkResponse bulkItemResponses) { + BulkRequest requestToReissue = new BulkRequest(); + int index = 0; + for (BulkItemResponse bulkItemResponse : bulkItemResponses.getItems()) { + if (bulkItemResponse.isFailed()) { + requestToReissue.add(currentBulkRequest.requests().get(index)); + } + index++; + } + return requestToReissue; + } + + private boolean canRetry(BulkResponse bulkItemResponses) { + if (!backoff.hasNext()) { + return false; + } + for (BulkItemResponse bulkItemResponse : bulkItemResponses) { + if (bulkItemResponse.isFailed()) { + Throwable cause = bulkItemResponse.getFailure().getCause(); + Throwable rootCause = ExceptionsHelper.unwrapCause(cause); + if (!rootCause.getClass().equals(retryOnThrowable)) { + return false; + } + } + } + return true; + } + + private void finishHim() { + try { + listener.onResponse(getAccumulatedResponse()); + } finally { + FutureUtils.cancel(scheduledRequestFuture); + } + } + + private void addResponses(BulkResponse response, Predicate filter) { + for (BulkItemResponse bulkItemResponse : response) { + if (filter.test(bulkItemResponse)) { + // Use client-side lock here to avoid visibility issues. This method may be called multiple times + // (based on how many retries we have to issue) and relying that the response handling code will be + // scheduled on the same thread is fragile. + synchronized (responses) { + responses.add(bulkItemResponse); + } + } + } + } + + private BulkResponse getAccumulatedResponse() { + BulkItemResponse[] itemResponses; + synchronized (responses) { + itemResponses = responses.toArray(new BulkItemResponse[1]); + } + long stopTimestamp = System.nanoTime(); + long totalLatencyMs = TimeValue.timeValueNanos(stopTimestamp - startTimestampNanos).millis(); + return new BulkResponse(itemResponses, totalLatencyMs); + } + + public void execute(BulkRequest bulkRequest) { + this.currentBulkRequest = bulkRequest; + client.bulk(bulkRequest, this); + } + } + + static class AsyncRetryHandler extends AbstractRetryHandler { + public AsyncRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener listener) { + super(retryOnThrowable, backoffPolicy, client, listener); + } + } + + static class SyncRetryHandler extends AbstractRetryHandler { + private final PlainActionFuture actionFuture; + + public static SyncRetryHandler create(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client) { + PlainActionFuture actionFuture = PlainActionFuture.newFuture(); + return new SyncRetryHandler(retryOnThrowable, backoffPolicy, client, actionFuture); + } + + public SyncRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, PlainActionFuture actionFuture) { + super(retryOnThrowable, backoffPolicy, client, actionFuture); + this.actionFuture = actionFuture; + } + + public ActionFuture executeBlocking(BulkRequest bulkRequest) { + super.execute(bulkRequest); + return actionFuture; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 9b18d0328e7..7252993427f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -239,7 +239,7 @@ public class TransportBulkAction extends HandledTransportAction implements private String id; @Nullable private String routing; + @Nullable + private String parent; private boolean refresh; private long 
version = Versions.MATCH_ANY; private VersionType versionType = VersionType.INTERNAL; @@ -94,6 +96,7 @@ public class DeleteRequest extends ReplicationRequest implements this.type = request.type(); this.id = request.id(); this.routing = request.routing(); + this.parent = request.parent(); this.refresh = request.refresh(); this.version = request.version(); this.versionType = request.versionType(); @@ -155,13 +158,18 @@ public class DeleteRequest extends ReplicationRequest implements } /** - * Sets the parent id of this document. Will simply set the routing to this value, as it is only - * used for routing with delete requests. + * @return The parent for this request. + */ + @Override + public String parent() { + return parent; + } + + /** + * Sets the parent id of this document. */ public DeleteRequest parent(String parent) { - if (routing == null) { - routing = parent; - } + this.parent = parent; return this; } @@ -230,6 +238,7 @@ public class DeleteRequest extends ReplicationRequest implements type = in.readString(); id = in.readString(); routing = in.readOptionalString(); + parent = in.readOptionalString(); refresh = in.readBoolean(); version = in.readLong(); versionType = VersionType.fromValue(in.readByte()); @@ -241,6 +250,7 @@ public class DeleteRequest extends ReplicationRequest implements out.writeString(type); out.writeString(id); out.writeOptionalString(routing()); + out.writeOptionalString(parent()); out.writeBoolean(refresh); out.writeLong(version); out.writeByte(versionType.getValue()); diff --git a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java index ca66b285753..f80b1a24396 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java @@ -95,7 +95,7 @@ public class TransportDeleteAction extends TransportReplicationAction { diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java index 935170e4b4c..c6919e858d1 100644 --- a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java @@ -49,6 +49,7 @@ public class GetRequest extends SingleShardRequest implements Realti private String type; private String id; private String routing; + private String parent; private String preference; private String[] fields; @@ -77,6 +78,7 @@ public class GetRequest extends SingleShardRequest implements Realti this.type = getRequest.type; this.id = getRequest.id; this.routing = getRequest.routing; + this.parent = getRequest.parent; this.preference = getRequest.preference; this.fields = getRequest.fields; this.fetchSourceContext = getRequest.fetchSourceContext; @@ -153,13 +155,17 @@ public class GetRequest extends SingleShardRequest implements Realti } /** - * Sets the parent id of this document. Will simply set the routing to this value, as it is only - * used for routing with delete requests. + * @return The parent for this request. + */ + public String parent() { + return parent; + } + + /** + * Sets the parent id of this document. 
*/ public GetRequest parent(String parent) { - if (routing == null) { - routing = parent; - } + this.parent = parent; return this; } @@ -291,6 +297,7 @@ public class GetRequest extends SingleShardRequest implements Realti type = in.readString(); id = in.readString(); routing = in.readOptionalString(); + parent = in.readOptionalString(); preference = in.readOptionalString(); refresh = in.readBoolean(); int size = in.readInt(); @@ -320,6 +327,7 @@ public class GetRequest extends SingleShardRequest implements Realti out.writeString(type); out.writeString(id); out.writeOptionalString(routing); + out.writeOptionalString(parent); out.writeOptionalString(preference); out.writeBoolean(refresh); diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index a6af7050fd7..db3c0f72bfd 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -20,13 +20,17 @@ package org.elasticsearch.action.get; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.RealtimeRequest; +import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -37,7 +41,11 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.search.fetch.source.FetchSourceContext; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; public class MultiGetRequest extends ActionRequest implements Iterable, CompositeIndicesRequest, RealtimeRequest { @@ -49,6 +57,7 @@ public class MultiGetRequest extends ActionRequest implements I private String type; private String id; private String routing; + private String parent; private String[] fields; private long version = Versions.MATCH_ANY; private VersionType versionType = VersionType.INTERNAL; @@ -116,12 +125,17 @@ public class MultiGetRequest extends ActionRequest implements I } public Item parent(String parent) { - if (routing == null) { - this.routing = parent; - } + this.parent = parent; return this; } + /** + * @return The parent for this request. + */ + public String parent() { + return parent; + } + public Item fields(String... 
fields) { this.fields = fields; return this; @@ -173,6 +187,7 @@ public class MultiGetRequest extends ActionRequest implements I type = in.readOptionalString(); id = in.readString(); routing = in.readOptionalString(); + parent = in.readOptionalString(); int size = in.readVInt(); if (size > 0) { fields = new String[size]; @@ -192,6 +207,7 @@ public class MultiGetRequest extends ActionRequest implements I out.writeOptionalString(type); out.writeString(id); out.writeOptionalString(routing); + out.writeOptionalString(parent); if (fields == null) { out.writeVInt(0); } else { @@ -221,6 +237,7 @@ public class MultiGetRequest extends ActionRequest implements I if (!id.equals(item.id)) return false; if (!index.equals(item.index)) return false; if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false; + if (parent != null ? !parent.equals(item.parent) : item.parent != null) return false; if (type != null ? !type.equals(item.type) : item.type != null) return false; if (versionType != item.versionType) return false; @@ -233,6 +250,7 @@ public class MultiGetRequest extends ActionRequest implements I result = 31 * result + (type != null ? type.hashCode() : 0); result = 31 * result + id.hashCode(); result = 31 * result + (routing != null ? routing.hashCode() : 0); + result = 31 * result + (parent != null ? parent.hashCode() : 0); result = 31 * result + (fields != null ? Arrays.hashCode(fields) : 0); result = 31 * result + Long.hashCode(version); result = 31 * result + versionType.hashCode(); diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index a2cb9873474..2d6bafc9623 100644 --- a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -82,7 +82,7 @@ public class TransportGetAction extends TransportSingleShardAction implements Do } /** - * Sets the parent id of this document. If routing is not set, automatically set it as the - * routing as well. + * Sets the parent id of this document. */ public IndexRequest parent(String parent) { this.parent = parent; - if (routing == null) { - routing = parent; - } return this; } @@ -593,7 +594,7 @@ public class IndexRequest extends ReplicationRequest implements Do public void process(MetaData metaData, @Nullable MappingMetaData mappingMd, boolean allowIdGeneration, String concreteIndex) { // resolve the routing if needed - routing(metaData.resolveIndexRouting(routing, index)); + routing(metaData.resolveIndexRouting(parent, routing, index)); // resolve timestamp if provided externally if (timestamp != null) { @@ -601,41 +602,7 @@ public class IndexRequest extends ReplicationRequest implements Do mappingMd != null ? 
mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER, getVersion(metaData, concreteIndex)); } - // extract values if needed if (mappingMd != null) { - MappingMetaData.ParseContext parseContext = mappingMd.createParseContext(id, routing, timestamp); - - if (parseContext.shouldParse()) { - XContentParser parser = null; - try { - parser = XContentHelper.createParser(source); - mappingMd.parse(parser, parseContext); - if (parseContext.shouldParseId()) { - id = parseContext.id(); - } - if (parseContext.shouldParseRouting()) { - if (routing != null && !routing.equals(parseContext.routing())) { - throw new MapperParsingException("The provided routing value [" + routing + "] doesn't match the routing key stored in the document: [" + parseContext.routing() + "]"); - } - routing = parseContext.routing(); - } - if (parseContext.shouldParseTimestamp()) { - timestamp = parseContext.timestamp(); - if (timestamp != null) { - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, mappingMd.timestamp().dateTimeFormatter(), getVersion(metaData, concreteIndex)); - } - } - } catch (MapperParsingException e) { - throw e; - } catch (Exception e) { - throw new ElasticsearchParseException("failed to parse doc to extract routing/timestamp/id", e); - } finally { - if (parser != null) { - parser.close(); - } - } - } - // might as well check for routing here if (mappingMd.routing().required() && routing == null) { throw new RoutingMissingException(concreteIndex, type, id); diff --git a/core/src/main/java/org/elasticsearch/action/indexedscripts/get/GetIndexedScriptResponse.java b/core/src/main/java/org/elasticsearch/action/indexedscripts/get/GetIndexedScriptResponse.java index d826de0bb53..04470850030 100644 --- a/core/src/main/java/org/elasticsearch/action/indexedscripts/get/GetIndexedScriptResponse.java +++ b/core/src/main/java/org/elasticsearch/action/indexedscripts/get/GetIndexedScriptResponse.java @@ -30,7 +30,6 @@ import org.elasticsearch.script.ScriptService; import java.io.IOException; import java.util.Iterator; -import java.util.Map; /** * The response of a get script action. 
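A note on the parent-handling hunks above (DeleteRequest, GetRequest, MultiGetRequest.Item, IndexRequest): they all make the same move. The parent id is no longer folded into routing at request-construction time; it becomes its own optionally serialized field (the readOptionalString/writeOptionalString pairs), and effective routing is resolved later on the coordinating node via metaData.resolveIndexRouting(parent, routing, index). A before/after sketch, with invented index, type and id values; only parent(), parent() getter and routing() come from the code above:

    import org.elasticsearch.action.delete.DeleteRequest;

    public class ParentRoutingSketch {
        public static void main(String[] args) {
            DeleteRequest request = new DeleteRequest("orders", "line_item", "7").parent("42");

            // Old behaviour: parent("42") copied the value into routing when routing was
            // unset, so parent was lost as a distinct field on the wire.
            // New behaviour: parent survives serialization on its own, routing stays null
            // here, and the coordinating node resolves the effective routing (alias
            // routing included) from parent and routing together.
            assert "42".equals(request.parent());
            assert request.routing() == null;
        }
    }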
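The bulk changes further up (BackoffPolicy, BulkRequestHandler, Retry) are easiest to judge from the caller's side. A minimal usage sketch of the new builder hook, assuming only the API shown in those hunks; the listener bodies and the constant-backoff parameters are illustrative, not prescribed:

    import org.elasticsearch.action.bulk.BackoffPolicy;
    import org.elasticsearch.action.bulk.BulkProcessor;
    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.bulk.BulkResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.unit.TimeValue;

    public class BulkBackoffExample {
        public static BulkProcessor build(Client client) {
            return BulkProcessor.builder(client, new BulkProcessor.Listener() {
                        @Override
                        public void beforeBulk(long executionId, BulkRequest request) {
                            // still called once per logical bulk, before any internal retries
                        }

                        @Override
                        public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                            // called once the retry chain is done; Retry accumulates the
                            // per-attempt item responses into this single BulkResponse
                        }

                        @Override
                        public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                            // per the new Listener javadoc, the thread's interruption status
                            // is restored before this is called with an InterruptedException
                        }
                    })
                    // retry bulks rejected with EsRejectedExecutionException up to 3 times,
                    // 100 ms apart; BackoffPolicy.noBackoff() restores the old fail-fast behaviour
                    .setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueMillis(100), 3))
                    .build();
        }
    }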
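For a feel for the default policy's timing: the iterator in BackoffPolicy.java computes each delay as start + 10 * ((int) Math.exp(0.8 * k) - 1) milliseconds, which for the 50 ms / 8 retries default yields 50, 60, 80, 150, 280, 580, 1250 and 2740 ms, 5190 ms in total, matching the "roughly 5.1 seconds" figure in its javadoc. A tiny check of that claim; the class name is invented, and note that BackoffPolicy is Iterable<TimeValue> in the real source even though the generics were stripped in the diff rendering above:

    import org.elasticsearch.action.bulk.BackoffPolicy;
    import org.elasticsearch.common.unit.TimeValue;

    public class BackoffProgressionCheck {
        public static void main(String[] args) {
            long totalMillis = 0;
            // default policy: 50 ms initial delay, 8 retries
            for (TimeValue delay : BackoffPolicy.exponentialBackoff()) {
                totalMillis += delay.millis();
            }
            System.out.println(totalMillis + " ms total"); // prints 5190 ms, i.e. roughly 5.1 s
        }
    }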
diff --git a/core/src/main/java/org/elasticsearch/action/indexedscripts/put/PutIndexedScriptRequest.java b/core/src/main/java/org/elasticsearch/action/indexedscripts/put/PutIndexedScriptRequest.java index 76b2dcc0522..82d0f9b7bb0 100644 --- a/core/src/main/java/org/elasticsearch/action/indexedscripts/put/PutIndexedScriptRequest.java +++ b/core/src/main/java/org/elasticsearch/action/indexedscripts/put/PutIndexedScriptRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.indexedscripts.put; -import java.nio.charset.StandardCharsets; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -40,6 +39,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.script.ScriptService; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java index 677f2aa88e6..13c2526a7a3 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java @@ -35,7 +35,11 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.highlight.HighlightField; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; /** * Encapsulates the response of a percolator request. diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java index 79f51db59f7..b080039ed0d 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java index 743b4dfe27b..bf7b9e5307b 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java @@ -22,7 +22,12 @@ package org.elasticsearch.action.percolate; import com.carrotsearch.hppc.IntArrayList; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.UnavailableShardsException; -import org.elasticsearch.action.get.*; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetItemResponse; +import org.elasticsearch.action.get.MultiGetRequest; +import 
org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.get.TransportMultiGetAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; @@ -42,7 +47,11 @@ import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReferenceArray; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java index 68bcdc1503d..1d29e6c3971 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.percolate; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; @@ -37,8 +36,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringText; -import org.elasticsearch.common.text.Text; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java index 40e8b0730ff..ebf3144a540 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java @@ -28,7 +28,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; /** * diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 442b0915e3b..52d45ec9407 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -473,6 +473,14 @@ public class SearchRequestBuilder extends ActionRequestBuilder { + + /** + * Should the query be profiled. Defaults to false + */ + public SearchRequestBuilder setProfile(boolean profile) { + sourceBuilder().profile(profile); + return this; + } + @Override public String toString() { if (request.source() != null) { diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 769e0978a71..e6681bf2b9f 100644 ---
a/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; @@ -32,9 +33,12 @@ import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; +import java.util.List; +import java.util.Map; import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; import static org.elasticsearch.search.internal.InternalSearchResponse.readInternalSearchResponse; @@ -160,6 +164,16 @@ public class SearchResponse extends ActionResponse implements StatusToXContent { this.scrollId = scrollId; } + /** + * If profiling was enabled, this returns an object containing the profile results from + * each shard. If profiling was not enabled, this will return null + * + * @return The profile results or null + */ + public @Nullable Map> getProfileResults() { + return internalResponse.profile(); + } + static final class Fields { static final XContentBuilderString _SCROLL_ID = new XContentBuilderString("_scroll_id"); static final XContentBuilderString TOOK = new XContentBuilderString("took"); diff --git a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index 79d0de4884e..0139186562c 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -25,15 +25,12 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget; diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 3177c676dc3..c106cd1d4e5 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -20,7 +20,10 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.type.*; +import org.elasticsearch.action.search.type.TransportSearchDfsQueryAndFetchAction; +import org.elasticsearch.action.search.type.TransportSearchDfsQueryThenFetchAction; +import org.elasticsearch.action.search.type.TransportSearchQueryAndFetchAction; +import 
org.elasticsearch.action.search.type.TransportSearchQueryThenFetchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterService; @@ -36,7 +39,8 @@ import org.elasticsearch.transport.TransportService; import java.util.Map; import java.util.Set; -import static org.elasticsearch.action.search.SearchType.*; +import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; +import static org.elasticsearch.action.search.SearchType.QUERY_AND_FETCH; /** * diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 2f4ce5dcc4a..445dec114ee 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -31,7 +31,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import static org.elasticsearch.action.search.type.ParsedScrollId.*; +import static org.elasticsearch.action.search.type.ParsedScrollId.QUERY_AND_FETCH_TYPE; +import static org.elasticsearch.action.search.type.ParsedScrollId.QUERY_THEN_FETCH_TYPE; import static org.elasticsearch.action.search.type.TransportSearchHelper.parseScrollId; /** diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java index 20bb205fef5..faaf1215542 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.search.type; import com.carrotsearch.hppc.IntArrayList; - import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java index c23e5b70c15..1d8589e7a96 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.search.type; import com.carrotsearch.hppc.IntArrayList; - import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java index 8f2df714319..8dd9c139c33 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.search.type; import com.carrotsearch.hppc.IntArrayList; - import org.apache.lucene.search.ScoreDoc; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ReduceSearchPhaseException; diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java index 31cd3986d2f..9d1004ccd5c 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.search.type; import com.carrotsearch.hppc.IntArrayList; - import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.elasticsearch.action.ActionListener; @@ -55,7 +54,9 @@ import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import java.util.List; import java.util.Map; @@ -78,7 +79,7 @@ public abstract class TransportSearchTypeAction extends TransportAction REQUIRES_NAME_SETTING = Setting.boolSetting("action.destructive_requires_name", false, true, Setting.Scope.CLUSTER); private volatile boolean destructiveRequiresName; @Inject - public DestructiveOperations(Settings settings, NodeSettingsService nodeSettingsService) { + public DestructiveOperations(Settings settings, ClusterSettings clusterSettings) { super(settings); - destructiveRequiresName = settings.getAsBoolean(DestructiveOperations.REQUIRES_NAME, false); - nodeSettingsService.addListener(this); + destructiveRequiresName = REQUIRES_NAME_SETTING.get(settings); + clusterSettings.addSettingsUpdateConsumer(REQUIRES_NAME_SETTING, this::setDestructiveRequiresName); + } + + private void setDestructiveRequiresName(boolean destructiveRequiresName) { + this.destructiveRequiresName = destructiveRequiresName; } /** @@ -65,15 +70,6 @@ public final class DestructiveOperations extends AbstractComponent implements No } } - @Override - public void onRefreshSettings(Settings settings) { - boolean newValue = settings.getAsBoolean(DestructiveOperations.REQUIRES_NAME, destructiveRequiresName); - if (destructiveRequiresName != newValue) { - logger.info("updating [action.operate_all_indices] from [{}] to [{}]", destructiveRequiresName, newValue); - this.destructiveRequiresName = newValue; - } - } - private static boolean hasWildcardUsage(String aliasOrIndex) { return "_all".equals(aliasOrIndex) || aliasOrIndex.indexOf('*') != -1; } diff --git a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index 3a00dbf81dc..bd9556f0500 100644 --- a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import 
org.elasticsearch.transport.TransportRequestHandler; @@ -36,14 +37,19 @@ import java.util.function.Supplier; public abstract class HandledTransportAction extends TransportAction{ protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver); + super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, new TransportHandler()); } class TransportHandler implements TransportRequestHandler { @Override - public final void messageReceived(final Request request, final TransportChannel channel) throws Exception { + public final void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { + messageReceived(request, channel); + } + + @Override + public final void messageReceived(Request request, TransportChannel channel) throws Exception { execute(request, new ActionListener() { @Override public void onResponse(Response response) { diff --git a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java index d278a992e93..3e0454550ba 100644 --- a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -19,12 +19,18 @@ package org.elasticsearch.action.support; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.util.concurrent.atomic.AtomicInteger; @@ -41,15 +47,17 @@ public abstract class TransportAction execute(Request request) { @@ -59,6 +67,28 @@ public abstract class TransportAction listener) { + Task task = taskManager.register("transport", actionName, request); + if (task == null) { + execute(null, request, listener); + } else { + execute(task, request, new ActionListener() { + @Override + public void onResponse(Response response) { + taskManager.unregister(task); + listener.onResponse(response); + } + + @Override + public void onFailure(Throwable e) { + taskManager.unregister(task); + listener.onFailure(e); + } + }); + } + } + + private final void execute(Task task, Request request, ActionListener listener) { + ActionRequestValidationException validationException = request.validate(); if (validationException != null) { listener.onFailure(validationException); @@ -67,17 +97,21 @@ public abstract class TransportAction(this, logger); - requestFilterChain.proceed(actionName, request, listener); + requestFilterChain.proceed(task, actionName, request, listener); } } + protected void doExecute(Task task, Request 
request, ActionListener listener) { + doExecute(request, listener); + } + protected abstract void doExecute(Request request, ActionListener listener); private static class RequestFilterChain implements ActionFilterChain { @@ -92,13 +126,13 @@ public abstract class TransportAction(actionName, listener, new ResponseFilterChain(this.action.filters, logger))); + this.action.doExecute(task, (Request) request, new FilteredActionListener(actionName, listener, new ResponseFilterChain(this.action.filters, logger))); } else { listener.onFailure(new IllegalStateException("proceed was called too many times")); } @@ -127,7 +161,7 @@ public abstract class TransportAction listener) throws Exception; + /** + * Override this operation if access to the task parameter is needed + */ + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { + masterOperation(request, state, listener); + } + protected boolean localExecute(Request request) { return false; } @@ -91,8 +99,14 @@ public abstract class TransportMasterNodeAction listener) { - new AsyncSingleAction(request, listener).start(); + protected final void doExecute(final Request request, ActionListener listener) { + logger.warn("attempt to execute a master node operation without task"); + throw new UnsupportedOperationException("task parameter is required for this operation"); + } + + @Override + protected void doExecute(Task task, final Request request, ActionListener listener) { + new AsyncSingleAction(task, request, listener).start(); } class AsyncSingleAction { @@ -100,6 +114,7 @@ public abstract class TransportMasterNodeAction listener; private final Request request; private volatile ClusterStateObserver observer; + private final Task task; private final ClusterStateObserver.ChangePredicate retryableOrNoBlockPredicate = new ClusterStateObserver.ValidationPredicate() { @Override @@ -109,7 +124,8 @@ public abstract class TransportMasterNodeAction listener) { + AsyncSingleAction(Task task, Request request, ActionListener listener) { + this.task = task; this.request = request; // TODO do we really need to wrap it in a listener? 
the handlers should be cheap if ((listener instanceof ThreadedActionListener) == false) { @@ -157,7 +173,7 @@ public abstract class TransportMasterNodeAction listener) { - new AsyncAction(request, listener).start(); + protected final void doExecute(NodesRequest request, ActionListener listener) { + logger.warn("attempt to execute a transport nodes operation without a task"); + throw new UnsupportedOperationException("task parameter is required for this operation"); + } + + @Override + protected void doExecute(Task task, NodesRequest request, ActionListener listener) { + new AsyncAction(task, request, listener).start(); } protected boolean transportCompress() { @@ -100,8 +114,10 @@ public abstract class TransportNodesAction listener; private final AtomicReferenceArray responses; private final AtomicInteger counter = new AtomicInteger(); + private final Task task; - private AsyncAction(NodesRequest request, ActionListener listener) { + private AsyncAction(Task task, NodesRequest request, ActionListener listener) { + this.task = task; this.request = request; this.listener = listener; ClusterState clusterState = clusterService.state(); @@ -144,7 +160,11 @@ public abstract class TransportNodesAction() { @Override public NodeResponse newInstance() { diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 26c439c0a3d..6fd7da91645 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -40,7 +40,9 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; @@ -59,7 +61,17 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.ReceiveTimeoutTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportChannelResponseHandler; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Collections; @@ -102,7 +114,7 @@ public abstract class TransportReplicationAction request, Supplier replicaRequest, String executor) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver); + super(settings, actionName, 
threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); this.transportService = transportService; this.clusterService = clusterService; this.indicesService = indicesService; @@ -300,11 +312,15 @@ public abstract class TransportReplicationAction handler = TransportChannelResponseHandler.emptyResponseHandler(logger, channel, extraMessage); + transportService.sendRequest(clusterService.localNode(), transportReplicaAction, request, handler); } @Override @@ -866,7 +882,7 @@ public abstract class TransportReplicationAction request, String executor) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver); + super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); this.clusterService = clusterService; this.transportService = transportService; @@ -173,7 +177,7 @@ public abstract class TransportSingleShardAction extends ActionRequest { + + + public static final String[] ALL_ACTIONS = Strings.EMPTY_ARRAY; + + public static final String[] ALL_NODES = Strings.EMPTY_ARRAY; + + public static final long ALL_TASKS = -1L; + + private String[] nodesIds = ALL_NODES; + + private TimeValue timeout; + + private String[] actions = ALL_ACTIONS; + + private String parentNode; + + private long parentTaskId = ALL_TASKS; + + public BaseTasksRequest() { + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + /** + * Get information about tasks from nodes based on the nodes ids specified. + * If none are passed, information for all nodes will be returned. + */ + public BaseTasksRequest(ActionRequest request, String... nodesIds) { + super(request); + this.nodesIds = nodesIds; + } + + /** + * Get information about tasks from nodes based on the nodes ids specified. + * If none are passed, information for all nodes will be returned. + */ + public BaseTasksRequest(String... nodesIds) { + this.nodesIds = nodesIds; + } + + /** + * Sets the list of action masks for the actions that should be returned + */ + @SuppressWarnings("unchecked") + public final T actions(String... actions) { + this.actions = actions; + return (T) this; + } + + /** + * Return the list of action masks for the actions that should be returned + */ + public String[] actions() { + return actions; + } + + public final String[] nodesIds() { + return nodesIds; + } + + @SuppressWarnings("unchecked") + public final T nodesIds(String... 
nodesIds) { + this.nodesIds = nodesIds; + return (T) this; + } + + /** + * Returns the parent node id that tasks should be filtered by + */ + public String parentNode() { + return parentNode; + } + + @SuppressWarnings("unchecked") + public T parentNode(String parentNode) { + this.parentNode = parentNode; + return (T) this; + } + + /** + * Returns the parent task id that tasks should be filtered by + */ + public long parentTaskId() { + return parentTaskId; + } + + @SuppressWarnings("unchecked") + public T parentTaskId(long parentTaskId) { + this.parentTaskId = parentTaskId; + return (T) this; + } + + + public TimeValue timeout() { + return this.timeout; + } + + @SuppressWarnings("unchecked") + public final T timeout(TimeValue timeout) { + this.timeout = timeout; + return (T) this; + } + + @SuppressWarnings("unchecked") + public final T timeout(String timeout) { + this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout"); + return (T) this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + nodesIds = in.readStringArray(); + actions = in.readStringArray(); + parentNode = in.readOptionalString(); + parentTaskId = in.readLong(); + if (in.readBoolean()) { + timeout = TimeValue.readTimeValue(in); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArrayNullable(nodesIds); + out.writeStringArrayNullable(actions); + out.writeOptionalString(parentNode); + out.writeLong(parentTaskId); + out.writeOptionalStreamable(timeout); + } + + public boolean match(Task task) { + if (actions() != null && actions().length > 0 && Regex.simpleMatch(actions(), task.getAction()) == false) { + return false; + } + if (parentNode() != null || parentTaskId() != BaseTasksRequest.ALL_TASKS) { + if (task instanceof ChildTask) { + if (parentNode() != null) { + if (parentNode().equals(((ChildTask) task).getParentNode()) == false) { + return false; + } + } + if (parentTaskId() != BaseTasksRequest.ALL_TASKS) { + if (parentTaskId() != ((ChildTask) task).getParentId()) { + return false; + } + } + } else { + // This is not a child task and we need to match parent node or id + return false; + } + } + return true; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java new file mode 100644 index 00000000000..43be2b46db1 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.support.tasks; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + + +/** + * Base class for responses of task-related operations + */ +public class BaseTasksResponse extends ActionResponse { + private List taskFailures; + private List nodeFailures; + + public BaseTasksResponse() { + } + + public BaseTasksResponse(List taskFailures, List nodeFailures) { + this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(taskFailures)); + this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(nodeFailures)); + } + + /** + * The list of task failures exception. + */ + public List getTaskFailures() { + return taskFailures; + } + + /** + * The list of node failures exception. + */ + public List getNodeFailures() { + return nodeFailures; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + int size = in.readVInt(); + List taskFailures = new ArrayList<>(); + for (int i = 0; i < size; i++) { + taskFailures.add(new TaskOperationFailure(in)); + } + size = in.readVInt(); + this.taskFailures = Collections.unmodifiableList(taskFailures); + List nodeFailures = new ArrayList<>(); + for (int i = 0; i < size; i++) { + nodeFailures.add(new FailedNodeException(in)); + } + this.nodeFailures = Collections.unmodifiableList(nodeFailures); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(taskFailures.size()); + for (TaskOperationFailure exp : taskFailures) { + exp.writeTo(out); + } + out.writeVInt(nodeFailures.size()); + for (FailedNodeException exp : nodeFailures) { + exp.writeTo(out); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java new file mode 100644 index 00000000000..a7265ce9998 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.support.tasks; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.unit.TimeValue; + +/** + * Builder for task-based requests + */ +public class TasksRequestBuilder , Response extends BaseTasksResponse, RequestBuilder extends TasksRequestBuilder> + extends ActionRequestBuilder { + + protected TasksRequestBuilder(ElasticsearchClient client, Action action, Request request) { + super(client, action, request); + } + + @SuppressWarnings("unchecked") + public final RequestBuilder setNodesIds(String... nodesIds) { + request.nodesIds(nodesIds); + return (RequestBuilder) this; + } + + @SuppressWarnings("unchecked") + public final RequestBuilder setActions(String... actions) { + request.actions(actions); + return (RequestBuilder) this; + } + + @SuppressWarnings("unchecked") + public final RequestBuilder setTimeout(TimeValue timeout) { + request.timeout(timeout); + return (RequestBuilder) this; + } +} + diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java new file mode 100644 index 00000000000..42be7e4eefc --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -0,0 +1,380 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.support.tasks; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.NoSuchNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ChildTaskRequest; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.NodeShouldNotConnectException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReferenceArray; +import java.util.function.Supplier; + +/** + * The base class for transport actions that are interacting with currently running tasks. 
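+ * <p>
+ * Illustrative sketch only (the class and type names below are hypothetical, not part
+ * of this change): a concrete subclass supplies the per-task operation, the reduce step,
+ * the wire format of a single task response, and the failure-accumulation policy.
+ * <pre>
+ * public class TransportMyTasksAction
+ *         extends TransportTasksAction<MyTasksRequest, MyTasksResponse, MyTaskResponse> {
+ *
+ *     // constructor wiring of cluster/transport services omitted
+ *
+ *     @Override
+ *     protected MyTaskResponse taskOperation(MyTasksRequest request, Task task) {
+ *         // runs on each node, once for every task accepted by request.match(task)
+ *         return new MyTaskResponse(task.getId(), task.getAction());
+ *     }
+ *
+ *     @Override
+ *     protected MyTaskResponse readTaskResponse(StreamInput in) throws IOException {
+ *         return new MyTaskResponse(in);
+ *     }
+ *
+ *     @Override
+ *     protected MyTasksResponse newResponse(MyTasksRequest request, List<MyTaskResponse> tasks,
+ *             List<TaskOperationFailure> taskFailures, List<FailedNodeException> nodeFailures) {
+ *         // reduce step, run on the coordinating node over the per-node results
+ *         return new MyTasksResponse(tasks, taskFailures, nodeFailures);
+ *     }
+ *
+ *     @Override
+ *     protected boolean accumulateExceptions() {
+ *         return true; // collect per-node failures rather than failing the whole request
+ *     }
+ * }
+ * </pre>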
+ */ +public abstract class TransportTasksAction< + TasksRequest extends BaseTasksRequest, + TasksResponse extends BaseTasksResponse, + TaskResponse extends Writeable + > extends HandledTransportAction { + + protected final ClusterName clusterName; + protected final ClusterService clusterService; + protected final TransportService transportService; + protected final Supplier requestSupplier; + protected final Supplier responseSupplier; + + protected final String transportNodeAction; + + protected TransportTasksAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, + ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, Supplier requestSupplier, + Supplier responseSupplier, + String nodeExecutor) { + super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, requestSupplier); + this.clusterName = clusterName; + this.clusterService = clusterService; + this.transportService = transportService; + this.transportNodeAction = actionName + "[n]"; + this.requestSupplier = requestSupplier; + this.responseSupplier = responseSupplier; + + transportService.registerRequestHandler(transportNodeAction, NodeTaskRequest::new, nodeExecutor, new NodeTransportHandler()); + } + + @Override + protected final void doExecute(TasksRequest request, ActionListener listener) { + logger.warn("attempt to execute a transport tasks operation without a task"); + throw new UnsupportedOperationException("task parameter is required for this operation"); + } + + @Override + protected void doExecute(Task task, TasksRequest request, ActionListener listener) { + new AsyncAction(task, request, listener).start(); + } + + private NodeTasksResponse nodeOperation(NodeTaskRequest nodeTaskRequest) { + TasksRequest request = nodeTaskRequest.tasksRequest; + List results = new ArrayList<>(); + List exceptions = new ArrayList<>(); + for (Task task : taskManager.getTasks().values()) { + // First check action and node filters + if (request.match(task)) { + try { + results.add(taskOperation(request, task)); + } catch (Exception ex) { + exceptions.add(new TaskOperationFailure(clusterService.localNode().id(), task.getId(), ex)); + } + } + } + return new NodeTasksResponse(clusterService.localNode().id(), results, exceptions); + } + + protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) { + return nodesIds; + } + + protected String[] resolveNodes(TasksRequest request, ClusterState clusterState) { + return clusterState.nodes().resolveNodesIds(request.nodesIds()); + } + + protected abstract TasksResponse newResponse(TasksRequest request, List tasks, List taskOperationFailures, List failedNodeExceptions); + + @SuppressWarnings("unchecked") + protected TasksResponse newResponse(TasksRequest request, AtomicReferenceArray responses) { + List tasks = new ArrayList<>(); + List failedNodeExceptions = new ArrayList<>(); + List taskOperationFailures = new ArrayList<>(); + for (int i = 0; i < responses.length(); i++) { + Object response = responses.get(i); + if (response instanceof FailedNodeException) { + failedNodeExceptions.add((FailedNodeException) response); + } else { + NodeTasksResponse tasksResponse = (NodeTasksResponse) response; + if (tasksResponse.results != null) { + tasks.addAll(tasksResponse.results); + } + if (tasksResponse.exceptions != null) { + taskOperationFailures.addAll(tasksResponse.exceptions); + } + } + } + return newResponse(request, 
tasks, taskOperationFailures, failedNodeExceptions); + } + + protected abstract TaskResponse readTaskResponse(StreamInput in) throws IOException; + + protected abstract TaskResponse taskOperation(TasksRequest request, Task task); + + protected boolean transportCompress() { + return false; + } + + protected abstract boolean accumulateExceptions(); + + private class AsyncAction { + + private final TasksRequest request; + private final String[] nodesIds; + private final DiscoveryNode[] nodes; + private final ActionListener listener; + private final AtomicReferenceArray responses; + private final AtomicInteger counter = new AtomicInteger(); + private final Task task; + + private AsyncAction(Task task, TasksRequest request, ActionListener listener) { + this.task = task; + this.request = request; + this.listener = listener; + ClusterState clusterState = clusterService.state(); + String[] nodesIds = resolveNodes(request, clusterState); + this.nodesIds = filterNodeIds(clusterState.nodes(), nodesIds); + ImmutableOpenMap nodes = clusterState.nodes().nodes(); + this.nodes = new DiscoveryNode[nodesIds.length]; + for (int i = 0; i < nodesIds.length; i++) { + this.nodes[i] = nodes.get(nodesIds[i]); + } + this.responses = new AtomicReferenceArray<>(this.nodesIds.length); + } + + private void start() { + if (nodesIds.length == 0) { + // nothing to do + try { + listener.onResponse(newResponse(request, responses)); + } catch (Throwable t) { + logger.debug("failed to generate empty response", t); + listener.onFailure(t); + } + } else { + TransportRequestOptions.Builder builder = TransportRequestOptions.builder(); + if (request.timeout() != null) { + builder.withTimeout(request.timeout()); + } + builder.withCompress(transportCompress()); + for (int i = 0; i < nodesIds.length; i++) { + final String nodeId = nodesIds[i]; + final int idx = i; + final DiscoveryNode node = nodes[i]; + try { + if (node == null) { + onFailure(idx, nodeId, new NoSuchNodeException(nodeId)); + } else if (!clusterService.localNode().shouldConnectTo(node) && !clusterService.localNode().equals(node)) { + // the check "!clusterService.localNode().equals(node)" is to maintain backward comp. 
where before + // we allowed to connect from "local" client node to itself, certain tests rely on it, if we remove it, we need to fix + // those (and they randomize the client node usage, so tricky to find when) + onFailure(idx, nodeId, new NodeShouldNotConnectException(clusterService.localNode(), node)); + } else { + NodeTaskRequest nodeRequest = new NodeTaskRequest(request); + nodeRequest.setParentTask(clusterService.localNode().id(), task.getId()); + transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(), new BaseTransportResponseHandler() { + @Override + public NodeTasksResponse newInstance() { + return new NodeTasksResponse(); + } + + @Override + public void handleResponse(NodeTasksResponse response) { + onOperation(idx, response); + } + + @Override + public void handleException(TransportException exp) { + onFailure(idx, node.id(), exp); + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + }); + } + } catch (Throwable t) { + onFailure(idx, nodeId, t); + } + } + } + } + + private void onOperation(int idx, NodeTasksResponse nodeResponse) { + responses.set(idx, nodeResponse); + if (counter.incrementAndGet() == responses.length()) { + finishHim(); + } + } + + private void onFailure(int idx, String nodeId, Throwable t) { + if (logger.isDebugEnabled() && !(t instanceof NodeShouldNotConnectException)) { + logger.debug("failed to execute on node [{}]", t, nodeId); + } + if (accumulateExceptions()) { + responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t)); + } + if (counter.incrementAndGet() == responses.length()) { + finishHim(); + } + } + + private void finishHim() { + TasksResponse finalResponse; + try { + finalResponse = newResponse(request, responses); + } catch (Throwable t) { + logger.debug("failed to combine responses from nodes", t); + listener.onFailure(t); + return; + } + listener.onResponse(finalResponse); + } + } + + class NodeTransportHandler implements TransportRequestHandler { + + @Override + public void messageReceived(final NodeTaskRequest request, final TransportChannel channel) throws Exception { + channel.sendResponse(nodeOperation(request)); + } + } + + + private class NodeTaskRequest extends ChildTaskRequest { + private TasksRequest tasksRequest; + + protected NodeTaskRequest() { + super(); + } + + protected NodeTaskRequest(TasksRequest tasksRequest) { + super(tasksRequest); + this.tasksRequest = tasksRequest; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + tasksRequest = requestSupplier.get(); + tasksRequest.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + tasksRequest.writeTo(out); + } + } + + private class NodeTasksResponse extends TransportResponse { + protected String nodeId; + protected List exceptions; + protected List results; + + public NodeTasksResponse() { + } + + public NodeTasksResponse(String nodeId, + List results, + List exceptions) { + this.nodeId = nodeId; + this.results = results; + this.exceptions = exceptions; + } + + public String getNodeId() { + return nodeId; + } + + public List getExceptions() { + return exceptions; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + nodeId = in.readString(); + int resultsSize = in.readVInt(); + results = new ArrayList<>(resultsSize); + for (; resultsSize > 0; resultsSize--) { + final TaskResponse result = in.readBoolean() ? 
readTaskResponse(in) : null; + results.add(result); + } + if (in.readBoolean()) { + int taskFailures = in.readVInt(); + exceptions = new ArrayList<>(taskFailures); + for (int i = 0; i < taskFailures; i++) { + exceptions.add(new TaskOperationFailure(in)); + } + } else { + exceptions = null; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(nodeId); + out.writeVInt(results.size()); + for (TaskResponse result : results) { + if (result != null) { + out.writeBoolean(true); + result.writeTo(out); + } else { + out.writeBoolean(false); + } + } + out.writeBoolean(exceptions != null); + if (exceptions != null) { + int taskFailures = exceptions.size(); + out.writeVInt(taskFailures); + for (TaskOperationFailure exception : exceptions) { + exception.writeTo(out); + } + } + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java index d4451157c4b..9cc328c2be7 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.termvectors; import org.elasticsearch.action.Action; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; /** diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java index 6d880a7b176..a8812fa8d1a 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java @@ -20,17 +20,26 @@ package org.elasticsearch.action.termvectors; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.RealtimeRequest; +import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; public class MultiTermVectorsRequest extends ActionRequest implements Iterable, CompositeIndicesRequest, RealtimeRequest { diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java index 7be1061d2f6..0b4152fed5c 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java @@ -26,7 +26,10 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Terms; import 
org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BoostAttribute; -import org.apache.lucene.util.*; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -171,7 +174,7 @@ public final class TermVectorsFields extends Fields { public Terms terms(String field) throws IOException { // first, find where in the termVectors bytes the actual term vector for // this field is stored - final int keySlot = fieldMap.indexOf(field); + final int keySlot = fieldMap.indexOf(field); if (keySlot < 0) { return null; // we don't have it. } diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java index a33e8e2cd42..cdeed093eed 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFilter.java @@ -18,9 +18,13 @@ */ package org.elasticsearch.action.termvectors; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.TermStatistics; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; @@ -63,7 +67,7 @@ public class TermVectorsFilter { this.dfs = dfs; this.scoreTerms = new HashMap<>(); - this.similarity = new DefaultSimilarity(); + this.similarity = new ClassicSimilarity(); } public void setSettings(TermVectorsRequest.FilterSettings settings) { @@ -204,21 +208,21 @@ public class TermVectorsFilter { BytesRef termBytesRef = termsEnum.term(); boolean foundTerm = topLevelTermsEnum.seekExact(termBytesRef); assert foundTerm : "Term: " + termBytesRef.utf8ToString() + " not found!"; - + Term term = new Term(fieldName, termBytesRef); - + // remove noise words int freq = getTermFreq(termsEnum, docsEnum); if (isNoise(term.bytes().utf8ToString(), freq)) { continue; } - + // now call on docFreq long docFreq = getTermStatistics(topLevelTermsEnum, term).docFreq(); if (!isAccepted(docFreq)) { continue; } - + // filter based on score float score = computeScore(docFreq, freq, numDocs); queue.addOrUpdate(new ScoreTerm(term.field(), term.bytes().utf8ToString(), score)); diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index c13e44097bc..7a97a242401 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -65,6 +65,8 @@ public class TermVectorsRequest extends SingleShardRequest i private String routing; + private String parent; + private VersionType versionType = VersionType.INTERNAL; private long version = Versions.MATCH_ANY; @@ -162,6 +164,7 @@ public class TermVectorsRequest extends SingleShardRequest i this.flagsEnum = other.getFlags().clone(); this.preference = other.preference(); 
this.routing = other.routing(); + this.parent = other.parent(); if (other.selectedFields != null) { this.selectedFields = new HashSet<>(other.selectedFields); } @@ -181,6 +184,7 @@ public class TermVectorsRequest extends SingleShardRequest i this.type = item.type(); this.selectedFields(item.fields()); this.routing(item.routing()); + this.parent(item.parent()); } public EnumSet getFlags() { @@ -259,14 +263,16 @@ public class TermVectorsRequest extends SingleShardRequest i return this; } + @Override + public String parent() { + return parent; + } + /** - * Sets the parent id of this document. Will simply set the routing to this - * value, as it is only used for routing with delete requests. + * Sets the parent id of this document. */ public TermVectorsRequest parent(String parent) { - if (routing == null) { - routing = parent; - } + this.parent = parent; return this; } @@ -506,6 +512,7 @@ public class TermVectorsRequest extends SingleShardRequest i doc = in.readBytesReference(); } routing = in.readOptionalString(); + parent = in.readOptionalString(); preference = in.readOptionalString(); long flags = in.readVLong(); @@ -545,6 +552,7 @@ public class TermVectorsRequest extends SingleShardRequest i out.writeBytesReference(doc); } out.writeOptionalString(routing); + out.writeOptionalString(parent); out.writeOptionalString(preference); long longFlags = 0; for (Flag flag : flagsEnum) { @@ -629,6 +637,8 @@ public class TermVectorsRequest extends SingleShardRequest i termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser)); } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) { termVectorsRequest.routing = parser.text(); + } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { + termVectorsRequest.parent = parser.text(); } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { termVectorsRequest.version = parser.longValue(); } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java index 9bf9957e1e3..c3a474cd21e 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.termvectors; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java index 89a8ff088f6..6b5e497b8e5 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java @@ -18,7 +18,11 @@ */ package org.elasticsearch.action.termvectors; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import 
org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.TermStatistics; @@ -288,4 +292,4 @@ final class TermVectorsWriter { // further... output.writeVLong(Math.max(0, value + 1)); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index dd78d7a3f65..3943d2e6a67 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -66,7 +66,7 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction() { diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index b790c21a45a..98d085b9b97 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ -71,8 +71,8 @@ public class TransportTermVectorsAction extends TransportSingleShardAction listener) { - request.routing((state.metaData().resolveIndexRouting(request.routing(), request.index()))); + request.routing((state.metaData().resolveIndexRouting(request.parent(), request.routing(), request.index()))); // Fail fast on the node that received the request, rather than failing when translating on the index or delete request. if (request.routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.type())) { throw new RoutingMissingException(request.concreteIndex(), request.type(), request.id()); diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 9f8b2a2e7be..d28ba2986e2 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.lookup.SourceLookup; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -245,7 +246,7 @@ public class UpdateHelper extends AbstractComponent { private Map executeScript(UpdateRequest request, Map ctx) { try { if (scriptService != null) { - ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request); + ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request, Collections.emptyMap()); script.setNextVar("ctx", ctx); script.run(); // we need to unwrap the ctx... diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 06df386828e..9e061d29500 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -184,13 +184,10 @@ public class UpdateRequest extends InstanceShardOperationRequest } /** - * The parent id is used for the upsert request and also implicitely sets the routing if not already set. + * The parent id is used for the upsert request. 
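+ * <p>
+ * Note that the parent no longer doubles as the routing value here; routing is now
+ * resolved from the parent when the request executes, via the updated
+ * resolveIndexRouting call in TransportUpdateAction above.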
*/ public UpdateRequest parent(String parent) { this.parent = parent; - if (routing == null) { - routing = parent; - } return this; } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java b/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java index ee804b1480e..1cd3a9ad57e 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java @@ -23,6 +23,8 @@ import org.elasticsearch.common.SuppressForbidden; import java.net.SocketPermission; import java.net.URL; +import java.io.FilePermission; +import java.io.IOException; import java.security.CodeSource; import java.security.Permission; import java.security.PermissionCollection; @@ -81,10 +83,39 @@ final class ESPolicy extends Policy { } } + // Special handling for broken Hadoop code: "let me execute or my classes will not load" + // yeah right, REMOVE THIS when hadoop is fixed + if (permission instanceof FilePermission && "<>".equals(permission.getName())) { + for (StackTraceElement element : Thread.currentThread().getStackTrace()) { + if ("org.apache.hadoop.util.Shell".equals(element.getClassName()) && + "runCommand".equals(element.getMethodName())) { + // we found the horrible method: the hack begins! + // force the hadoop code to back down, by throwing an exception that it catches. + rethrow(new IOException("no hadoop, you cannot do this.")); + } + } + } + // otherwise defer to template + dynamic file permissions return template.implies(domain, permission) || dynamic.implies(permission) || system.implies(domain, permission); } + /** + * Classy puzzler to rethrow any checked exception as an unchecked one. + */ + private static class Rethrower { + private void rethrow(Throwable t) throws T { + throw (T) t; + } + } + + /** + * Rethrows t (identical object). 
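+ * <p>
+ * This compiles because the Rethrower type parameter is only bounded by Throwable and
+ * is bound to an unchecked exception type at the call site, while the cast inside
+ * rethrow is erased at runtime, so the checked IOException above escapes without a
+ * throws clause being declared anywhere.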
+ */ + private void rethrow(Throwable t) { + new Rethrower().rethrow(t); + } + @Override public PermissionCollection getPermissions(CodeSource codesource) { // code should not rely on this method, or at least use it correctly: diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java b/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java index bff22bc19f5..573f3d5be3e 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java @@ -22,7 +22,6 @@ package org.elasticsearch.bootstrap; import com.sun.jna.Native; import com.sun.jna.NativeLong; import com.sun.jna.Structure; - import org.apache.lucene.util.Constants; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -53,21 +52,21 @@ final class JNACLibrary { static native int mlockall(int flags); static native int geteuid(); - + /** corresponds to struct rlimit */ public static final class Rlimit extends Structure implements Structure.ByReference { public NativeLong rlim_cur = new NativeLong(0); public NativeLong rlim_max = new NativeLong(0); - + @Override protected List getFieldOrder() { return Arrays.asList(new String[] { "rlim_cur", "rlim_max" }); } } - + static native int getrlimit(int resource, Rlimit rlimit); static native int setrlimit(int resource, Rlimit rlimit); - + static native String strerror(int errno); private JNACLibrary() { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java b/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java index 26e485802f4..fbd6857d365 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java @@ -19,9 +19,12 @@ package org.elasticsearch.bootstrap; -import com.sun.jna.*; +import com.sun.jna.IntegerType; +import com.sun.jna.Native; +import com.sun.jna.NativeLong; +import com.sun.jna.Pointer; +import com.sun.jna.Structure; import com.sun.jna.win32.StdCallLibrary; - import org.apache.lucene.util.Constants; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java index 5356d33bb8e..78dbc00ae30 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -21,7 +21,6 @@ package org.elasticsearch.bootstrap; import com.sun.jna.Native; import com.sun.jna.Pointer; - import org.apache.lucene.util.Constants; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -56,18 +55,18 @@ class JNANatives { boolean rlimitSuccess = false; long softLimit = 0; long hardLimit = 0; - + try { int result = JNACLibrary.mlockall(JNACLibrary.MCL_CURRENT); if (result == 0) { LOCAL_MLOCKALL = true; return; } - + errno = Native.getLastError(); errMsg = JNACLibrary.strerror(errno); if (Constants.LINUX || Constants.MAC_OS_X) { - // we only know RLIMIT_MEMLOCK for these two at the moment. + // we only know RLIMIT_MEMLOCK for these two at the moment. 
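// read the current soft/hard limits via getrlimit(2) so the log output below can suggest raising RLIMIT_MEMLOCK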
JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit(); if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_MEMLOCK, rlimit) == 0) { rlimitSuccess = true; @@ -103,7 +102,7 @@ class JNANatives { } } } - + static String rlimitToString(long value) { assert Constants.LINUX || Constants.MAC_OS_X; if (value == JNACLibrary.RLIM_INFINITY) { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java b/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java index 9a4a26c74e3..00f60a70a20 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java @@ -26,7 +26,6 @@ import com.sun.jna.NativeLong; import com.sun.jna.Pointer; import com.sun.jna.Structure; import com.sun.jna.ptr.PointerByReference; - import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.logging.ESLogger; @@ -43,7 +42,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -/** +/** * Installs a limited form of secure computing mode, * to filters system calls to block process execution. *
<p>
@@ -54,7 +53,7 @@ import java.util.Map; *
<p>
* On Linux BPF Filters are installed using either {@code seccomp(2)} (3.17+) or {@code prctl(2)} (3.5+). {@code seccomp(2)} * is preferred, as it allows filters to be applied to any existing threads in the process, and one motivation - * here is to protect against bugs in the JVM. Otherwise, code will fall back to the {@code prctl(2)} method + * here is to protect against bugs in the JVM. Otherwise, code will fall back to the {@code prctl(2)} method * which will at least protect elasticsearch application threads. *
<p>
* Linux BPF filters will return {@code EACCES} (Access Denied) for the following system calls: @@ -99,13 +98,13 @@ final class Seccomp { /** Access to non-standard Linux libc methods */ static interface LinuxLibrary extends Library { - /** - * maps to prctl(2) + /** + * maps to prctl(2) */ int prctl(int option, NativeLong arg2, NativeLong arg3, NativeLong arg4, NativeLong arg5); - /** - * used to call seccomp(2), its too new... - * this is the only way, DONT use it on some other architecture unless you know wtf you are doing + /** + * used to call seccomp(2), its too new... + * this is the only way, DONT use it on some other architecture unless you know wtf you are doing */ NativeLong syscall(NativeLong number, Object... args); }; @@ -124,7 +123,7 @@ final class Seccomp { } linux_libc = lib; } - + /** the preferred method is seccomp(2), since we can apply to all threads of the process */ static final int SECCOMP_SET_MODE_FILTER = 1; // since Linux 3.17 static final int SECCOMP_FILTER_FLAG_TSYNC = 1; // since Linux 3.17 @@ -135,7 +134,7 @@ final class Seccomp { static final int PR_GET_SECCOMP = 21; // since Linux 2.6.23 static final int PR_SET_SECCOMP = 22; // since Linux 2.6.23 static final long SECCOMP_MODE_FILTER = 2; // since Linux Linux 3.5 - + /** corresponds to struct sock_filter */ static final class SockFilter { short code; // insn @@ -150,12 +149,12 @@ final class Seccomp { this.k = k; } } - + /** corresponds to struct sock_fprog */ public static final class SockFProg extends Structure implements Structure.ByReference { public short len; // number of filters public Pointer filter; // filters - + public SockFProg(SockFilter filters[]) { len = (short) filters.length; // serialize struct sock_filter * explicitly, its less confusing than the JNA magic we would need @@ -170,13 +169,13 @@ final class Seccomp { } this.filter = filter; } - + @Override protected List getFieldOrder() { return Arrays.asList(new String[] { "len", "filter" }); } } - + // BPF "macros" and constants static final int BPF_LD = 0x00; static final int BPF_W = 0x00; @@ -187,15 +186,15 @@ final class Seccomp { static final int BPF_JGT = 0x20; static final int BPF_RET = 0x06; static final int BPF_K = 0x00; - + static SockFilter BPF_STMT(int code, int k) { return new SockFilter((short) code, (byte) 0, (byte) 0, k); } - + static SockFilter BPF_JUMP(int code, int k, int jt, int jf) { return new SockFilter((short) code, (byte) jt, (byte) jf, k); } - + static final int SECCOMP_RET_ERRNO = 0x00050000; static final int SECCOMP_RET_DATA = 0x0000FFFF; static final int SECCOMP_RET_ALLOW = 0x7FFF0000; @@ -260,13 +259,13 @@ final class Seccomp { /** try to install our BPF filters via seccomp() or prctl() to block execution */ private static int linuxImpl() { // first be defensive: we can give nice errors this way, at the very least. - // also, some of these security features get backported to old versions, checking kernel version here is a big no-no! + // also, some of these security features get backported to old versions, checking kernel version here is a big no-no! final Arch arch = ARCHITECTURES.get(Constants.OS_ARCH); boolean supported = Constants.LINUX && arch != null; if (supported == false) { throw new UnsupportedOperationException("seccomp unavailable: '" + Constants.OS_ARCH + "' architecture unsupported"); } - + // we couldn't link methods, could be some really ancient kernel (e.g. < 2.1.57) or some bug if (linux_libc == null) { throw new UnsupportedOperationException("seccomp unavailable: could not link methods. 
requires kernel 3.5+ with CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER compiled in"); @@ -364,12 +363,12 @@ final class Seccomp { if (linux_prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0) != 0) { throw new UnsupportedOperationException("prctl(PR_SET_NO_NEW_PRIVS): " + JNACLibrary.strerror(Native.getLastError())); } - + // check it worked if (linux_prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0) != 1) { throw new UnsupportedOperationException("seccomp filter did not really succeed: prctl(PR_GET_NO_NEW_PRIVS): " + JNACLibrary.strerror(Native.getLastError())); } - + // BPF installed to check arch, limit, then syscall. See https://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt for details. SockFilter insns[] = { /* 1 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_ARCH_OFFSET), // @@ -399,11 +398,11 @@ final class Seccomp { } if (linux_prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, pointer, 0, 0) != 0) { int errno2 = Native.getLastError(); - throw new UnsupportedOperationException("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) + + throw new UnsupportedOperationException("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) + ", prctl(PR_SET_SECCOMP): " + JNACLibrary.strerror(errno2)); } } - + // now check that the filter was really installed, we should be in filter mode. if (linux_prctl(PR_GET_SECCOMP, 0, 0, 0, 0) != 2) { throw new UnsupportedOperationException("seccomp filter installation did not really succeed. seccomp(PR_GET_SECCOMP): " + JNACLibrary.strerror(Native.getLastError())); @@ -486,12 +485,12 @@ final class Seccomp { } } } - + // Solaris implementation via priv_set(3C) /** Access to non-standard Solaris libc methods */ static interface SolarisLibrary extends Library { - /** + /** * see priv_set(3C), a convenience method for setppriv(2). */ int priv_set(int op, String which, String... privs); @@ -511,7 +510,7 @@ final class Seccomp { } libc_solaris = lib; } - + // constants for priv_set(2) static final int PRIV_OFF = 1; static final String PRIV_ALLSETS = null; @@ -531,7 +530,7 @@ final class Seccomp { throw new UnsupportedOperationException("priv_set unavailable: could not link methods. requires Solaris 10+"); } - // drop a null-terminated list of privileges + // drop a null-terminated list of privileges if (libc_solaris.priv_set(PRIV_OFF, PRIV_ALLSETS, PRIV_PROC_FORK, PRIV_PROC_EXEC, null) != 0) { throw new UnsupportedOperationException("priv_set unavailable: priv_set(): " + JNACLibrary.strerror(Native.getLastError())); } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index 2d342eb5743..43ad73b5dea 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -29,7 +29,8 @@ import org.elasticsearch.http.netty.NettyHttpServerTransport; import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.transport.netty.NettyTransport; -import java.io.*; +import java.io.FilePermission; +import java.io.IOException; import java.net.SocketPermission; import java.net.URISyntaxException; import java.net.URL; @@ -49,7 +50,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -/** +/** * Initializes SecurityManager with necessary permissions. *
 * <p>
 * <h1>Initialization</h1>
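The Seccomp hunks above are dense, but the mechanism is small: build an array of struct sock_filter instructions with the BPF_STMT/BPF_JUMP helpers, wrap them in a sock_fprog, and install the program via seccomp(2), falling back to prctl(PR_SET_SECCOMP). The standalone sketch below is not the patch's code: the class name, the choice of fork as the blocked call, and the x86-64 syscall number are illustrative assumptions, and the real filter also validates seccomp_data.arch before looking at the syscall number. It only shows how those helpers encode a block-with-EACCES program.

import java.util.ArrayList;
import java.util.List;

public class SeccompFilterSketch {
    // BPF opcode constants as defined in the patch and <linux/bpf_common.h>
    static final int BPF_LD = 0x00, BPF_W = 0x00, BPF_ABS = 0x20;
    static final int BPF_JMP = 0x05, BPF_JEQ = 0x10, BPF_RET = 0x06, BPF_K = 0x00;
    static final int SECCOMP_RET_ERRNO = 0x00050000; // deny the call, returning an errno
    static final int SECCOMP_RET_ALLOW = 0x7FFF0000; // let the call through
    static final int EACCES = 13;                    // errno handed back for denied calls
    static final int SECCOMP_DATA_NR_OFFSET = 0;     // offsetof(struct seccomp_data, nr)
    static final int NR_FORK = 57;                   // assumption: fork(2) on x86-64

    /** one struct sock_filter: { code, jt, jf, k }, mirroring the patch's SockFilter */
    static final class Insn {
        final short code; final byte jt; final byte jf; final int k;
        Insn(int code, int jt, int jf, int k) {
            this.code = (short) code; this.jt = (byte) jt; this.jf = (byte) jf; this.k = k;
        }
    }

    static Insn stmt(int code, int k)                 { return new Insn(code, 0, 0, k); }    // like BPF_STMT
    static Insn jump(int code, int k, int jt, int jf) { return new Insn(code, jt, jf, k); }  // like BPF_JUMP

    public static void main(String[] args) {
        List<Insn> program = new ArrayList<>();
        program.add(stmt(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_NR_OFFSET)); // A = syscall number
        program.add(jump(BPF_JMP + BPF_JEQ + BPF_K, NR_FORK, 0, 1));         // fork? fall through to deny : skip the deny
        program.add(stmt(BPF_RET + BPF_K, SECCOMP_RET_ERRNO | EACCES));      // deny with EACCES
        program.add(stmt(BPF_RET + BPF_K, SECCOMP_RET_ALLOW));               // allow everything else
        for (Insn i : program) {
            System.out.printf("{ code=0x%04x, jt=%d, jf=%d, k=0x%08x }%n", i.code, i.jt, i.jf, i.k);
        }
    }
}

Returning SECCOMP_RET_ERRNO instead of a kill action is what lets a blocked call surface in the JVM as an ordinary EACCES error, matching the behavior described at the top of Seccomp.java.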
@@ -105,8 +106,8 @@ import java.util.Map; final class Security { /** no instantiation */ private Security() {} - - /** + + /** * Initializes SecurityManager for the environment * Can only happen once! * @param environment configuration for generating dynamic permissions @@ -266,11 +267,11 @@ final class Security { policy.add(new FilePermission(environment.pidFile().toString(), "delete")); } } - + static void addBindPermissions(Permissions policy, Settings settings) throws IOException { // http is simple - String httpRange = settings.get("http.netty.port", - settings.get("http.port", + String httpRange = settings.get("http.netty.port", + settings.get("http.port", NettyHttpServerTransport.DEFAULT_PORT_RANGE)); // listen is always called with 'localhost' but use wildcard to be sure, no name service is consulted. // see SocketPermission implies() code @@ -287,8 +288,8 @@ final class Security { for (Map.Entry entry : profiles.entrySet()) { Settings profileSettings = entry.getValue(); String name = entry.getKey(); - String transportRange = profileSettings.get("port", - settings.get("transport.tcp.port", + String transportRange = profileSettings.get("port", + settings.get("transport.tcp.port", NettyTransport.DEFAULT_PORT_RANGE)); // a profile is only valid if its the default profile, or if it has an actual name and specifies a port @@ -300,7 +301,7 @@ final class Security { } } } - + /** * Add access to path (and all files underneath it) * @param policy current policy to add permissions to @@ -320,7 +321,7 @@ final class Security { policy.add(new FilePermission(path.toString(), permissions)); policy.add(new FilePermission(path.toString() + path.getFileSystem().getSeparator() + "-", permissions)); } - + /** * Ensures configured directory {@code path} exists. * @throws IOException if {@code path} exists, but is not a directory, not accessible, or broken symbolic link. diff --git a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java index cdcff7810df..dc050e0b6c6 100644 --- a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java +++ b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java @@ -32,7 +32,10 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; import java.util.Locale; -import static org.elasticsearch.common.recycler.Recyclers.*; +import static org.elasticsearch.common.recycler.Recyclers.concurrent; +import static org.elasticsearch.common.recycler.Recyclers.concurrentDeque; +import static org.elasticsearch.common.recycler.Recyclers.dequeFactory; +import static org.elasticsearch.common.recycler.Recyclers.none; /** A recycler of fixed-size pages. 
*/ public class PageCacheRecycler extends AbstractComponent { diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index a396e183bb7..e7461dabfe1 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -33,7 +33,12 @@ import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.fieldstats.FieldStatsRequest; import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder; import org.elasticsearch.action.fieldstats.FieldStatsResponse; -import org.elasticsearch.action.get.*; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetRequestBuilder; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetRequest; +import org.elasticsearch.action.get.MultiGetRequestBuilder; +import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; @@ -46,12 +51,32 @@ import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptResponse; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; -import org.elasticsearch.action.percolate.*; -import org.elasticsearch.action.search.*; +import org.elasticsearch.action.percolate.MultiPercolateRequest; +import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; +import org.elasticsearch.action.percolate.MultiPercolateResponse; +import org.elasticsearch.action.percolate.PercolateRequest; +import org.elasticsearch.action.percolate.PercolateRequestBuilder; +import org.elasticsearch.action.percolate.PercolateResponse; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollRequestBuilder; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchRequestBuilder; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.SearchScrollRequestBuilder; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; -import org.elasticsearch.action.termvectors.*; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder; +import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; +import org.elasticsearch.action.termvectors.TermVectorsRequest; +import org.elasticsearch.action.termvectors.TermVectorsRequestBuilder; +import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; @@ -414,7 +439,7 @@ public interface Client extends ElasticsearchClient, Releasable { * 
Performs multiple search requests. */ MultiSearchRequestBuilder prepareMultiSearch(); - + /** * An action that returns the term vectors for a specific document. * diff --git a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java index 1be22b257e7..2cee4341a39 100644 --- a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java +++ b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java @@ -19,7 +19,8 @@ package org.elasticsearch.client; -import org.elasticsearch.action.*; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; @@ -32,6 +33,9 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse; @@ -248,6 +252,29 @@ public interface ClusterAdminClient extends ElasticsearchClient { */ NodesHotThreadsRequestBuilder prepareNodesHotThreads(String... nodesIds); + /** + * List tasks + * + * @param request The nodes tasks request + * @return The result future + * @see org.elasticsearch.client.Requests#listTasksRequest(String...) + */ + ActionFuture listTasks(ListTasksRequest request); + + /** + * List active tasks + * + * @param request The nodes tasks request + * @param listener A listener to be notified with a result + * @see org.elasticsearch.client.Requests#listTasksRequest(String...) + */ + void listTasks(ListTasksRequest request, ActionListener listener); + + /** + * List active tasks + */ + ListTasksRequestBuilder prepareListTasks(String... nodesIds); + /** * Returns list of shards the given search would be executed on. 
*/ diff --git a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java index 08a95bc71e6..4cf5a5a961d 100644 --- a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java +++ b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java @@ -20,7 +20,12 @@ package org.elasticsearch.client; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.threadpool.ThreadPool; public interface ElasticsearchClient { diff --git a/core/src/main/java/org/elasticsearch/client/FilterClient.java b/core/src/main/java/org/elasticsearch/client/FilterClient.java index c0a93f5aa05..06d81f0c9d5 100644 --- a/core/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/core/src/main/java/org/elasticsearch/client/FilterClient.java @@ -18,7 +18,11 @@ */ package org.elasticsearch.client; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; diff --git a/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index 15def3b273e..67205fc0a9a 100644 --- a/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -53,13 +53,21 @@ import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.mapping.get.*; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequestBuilder; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuilder; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import 
org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; @@ -82,8 +90,8 @@ import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoreRequestBuilder; -import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -390,6 +398,29 @@ public interface IndicesAdminClient extends ElasticsearchClient { */ FlushRequestBuilder prepareFlush(String... indices); + /** + * Explicitly sync flush one or more indices (write sync id to shards for faster recovery). + * + * @param request The sync flush request + * @return A result future + * @see org.elasticsearch.client.Requests#syncedFlushRequest(String...) + */ + ActionFuture syncedFlush(SyncedFlushRequest request); + + /** + * Explicitly sync flush one or more indices (write sync id to shards for faster recovery). + * + * @param request The sync flush request + * @param listener A listener to be notified with a result + * @see org.elasticsearch.client.Requests#syncedFlushRequest(String...) + */ + void syncedFlush(SyncedFlushRequest request, ActionListener listener); + + /** + * Explicitly sync flush one or more indices (write sync id to shards for faster recovery). + */ + SyncedFlushRequestBuilder prepareSyncedFlush(String... indices); + /** * Explicitly force merge one or more indices into the number of segments. 
* diff --git a/core/src/main/java/org/elasticsearch/client/Requests.java b/core/src/main/java/org/elasticsearch/client/Requests.java index 7f0decaba52..7fb6c5c2de0 100644 --- a/core/src/main/java/org/elasticsearch/client/Requests.java +++ b/core/src/main/java/org/elasticsearch/client/Requests.java @@ -22,6 +22,7 @@ package org.elasticsearch.client; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; @@ -43,9 +44,10 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; @@ -131,7 +133,7 @@ public class Requests { public static SuggestRequest suggestRequest(String... indices) { return new SuggestRequest(indices); } - + /** * Creates a search request against one or more indices. Note, the search source must be set either using the * actual JSON search source, or the {@link org.elasticsearch.search.builder.SearchSourceBuilder}. @@ -265,6 +267,17 @@ public class Requests { return new FlushRequest(indices); } + /** + * Creates a synced flush indices request. + * + * @param indices The indices to sync flush. Use null or _all to execute against all indices + * @return The synced flush request + * @see org.elasticsearch.client.IndicesAdminClient#syncedFlush(SyncedFlushRequest) + */ + public static SyncedFlushRequest syncedFlushRequest(String... indices) { + return new SyncedFlushRequest(indices); + } + /** * Creates a force merge request. * @@ -392,6 +405,27 @@ public class Requests { return new ClusterStatsRequest(); } + /** + * Creates a nodes tasks request against all the nodes. + * + * @return The nodes tasks request + * @see org.elasticsearch.client.ClusterAdminClient#listTasks(ListTasksRequest) + */ + public static ListTasksRequest listTasksRequest() { + return new ListTasksRequest(); + } + + /** + * Creates a nodes tasks request against one or more nodes. Pass null or an empty array for all nodes. + * + * @param nodesIds The nodes ids to get the tasks for + * @return The nodes tasks request + * @see org.elasticsearch.client.ClusterAdminClient#listTasks(ListTasksRequest) + */ + public static ListTasksRequest listTasksRequest(String... 
nodesIds) { + return new ListTasksRequest(nodesIds); + } + /** * Registers snapshot repository * diff --git a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java index 74938875648..65adfad64dc 100644 --- a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -19,7 +19,12 @@ package org.elasticsearch.client.node; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.client.support.Headers; diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 1b5e8539ac6..e085c8da075 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -19,7 +19,12 @@ package org.elasticsearch.client.support; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; @@ -36,6 +41,10 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder; @@ -143,6 +152,10 @@ import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; import org.elasticsearch.action.admin.indices.flush.FlushResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequestBuilder; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; @@ -151,7 +164,14 @@ import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.mapping.get.*; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequestBuilder; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuilder; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; @@ -240,7 +260,14 @@ import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.fieldstats.FieldStatsRequest; import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder; import org.elasticsearch.action.fieldstats.FieldStatsResponse; -import org.elasticsearch.action.get.*; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetRequestBuilder; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetAction; +import org.elasticsearch.action.get.MultiGetRequest; +import org.elasticsearch.action.get.MultiGetRequestBuilder; +import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -257,20 +284,52 @@ import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; -import org.elasticsearch.action.percolate.*; -import org.elasticsearch.action.search.*; +import org.elasticsearch.action.percolate.MultiPercolateAction; +import org.elasticsearch.action.percolate.MultiPercolateRequest; +import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; +import org.elasticsearch.action.percolate.MultiPercolateResponse; +import org.elasticsearch.action.percolate.PercolateAction; +import org.elasticsearch.action.percolate.PercolateRequest; +import org.elasticsearch.action.percolate.PercolateRequestBuilder; +import org.elasticsearch.action.percolate.PercolateResponse; +import org.elasticsearch.action.search.ClearScrollAction; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollRequestBuilder; +import org.elasticsearch.action.search.ClearScrollResponse; +import 
org.elasticsearch.action.search.MultiSearchAction; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchRequestBuilder; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollAction; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.SearchScrollRequestBuilder; import org.elasticsearch.action.suggest.SuggestAction; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.ThreadedActionListener; -import org.elasticsearch.action.termvectors.*; +import org.elasticsearch.action.termvectors.MultiTermVectorsAction; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder; +import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; +import org.elasticsearch.action.termvectors.TermVectorsAction; +import org.elasticsearch.action.termvectors.TermVectorsRequest; +import org.elasticsearch.action.termvectors.TermVectorsRequestBuilder; +import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.*; +import org.elasticsearch.client.AdminClient; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ClusterAdminClient; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; @@ -913,6 +972,21 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new NodesHotThreadsRequestBuilder(this, NodesHotThreadsAction.INSTANCE).setNodesIds(nodesIds); } + @Override + public ActionFuture listTasks(final ListTasksRequest request) { + return execute(ListTasksAction.INSTANCE, request); + } + + @Override + public void listTasks(final ListTasksRequest request, final ActionListener listener) { + execute(ListTasksAction.INSTANCE, request, listener); + } + + @Override + public ListTasksRequestBuilder prepareListTasks(String... 
nodesIds) { + return new ListTasksRequestBuilder(this, ListTasksAction.INSTANCE).setNodesIds(nodesIds); + } + @Override public ActionFuture searchShards(final ClusterSearchShardsRequest request) { return execute(ClusterSearchShardsAction.INSTANCE, request); @@ -1315,6 +1389,21 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new FlushRequestBuilder(this, FlushAction.INSTANCE).setIndices(indices); } + @Override + public ActionFuture syncedFlush(SyncedFlushRequest request) { + return execute(SyncedFlushAction.INSTANCE, request); + } + + @Override + public void syncedFlush(SyncedFlushRequest request, ActionListener listener) { + execute(SyncedFlushAction.INSTANCE, request, listener); + } + + @Override + public SyncedFlushRequestBuilder prepareSyncedFlush(String... indices) { + return new SyncedFlushRequestBuilder(this, SyncedFlushAction.INSTANCE).setIndices(indices); + } + @Override public void getMappings(GetMappingsRequest request, ActionListener listener) { execute(GetMappingsAction.INSTANCE, request, listener); diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 33cf3479419..3b8be668f43 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -32,7 +32,6 @@ import org.elasticsearch.client.support.Headers; import org.elasticsearch.client.transport.support.TransportProxyClient; import org.elasticsearch.cluster.ClusterNameModule; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.Module; @@ -43,19 +42,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.indices.breaker.CircuitBreakerModule; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.node.internal.InternalSettingsPreparer; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsModule; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.netty.NettyTransport; @@ -69,7 +64,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; * The transport client allows to create a client that is not part of the cluster, but simply connects to one * or more nodes directly by adding their respective addresses using {@link #addTransportAddress(org.elasticsearch.common.transport.TransportAddress)}. *

- * The transport client important modules used is the {@link org.elasticsearch.transport.TransportModule} which is + * The most important module used by the transport client is the {@link org.elasticsearch.common.network.NetworkModule}, which is * started in client mode (only connects, no bind). */ public class TransportClient extends AbstractClient { @@ -143,10 +138,9 @@ public class TransportClient extends AbstractClient { } modules.add(new PluginsModule(pluginsService)); modules.add(new SettingsModule(this.settings, settingsFilter )); - modules.add(new NetworkModule(networkService)); + modules.add(new NetworkModule(networkService, this.settings, true)); modules.add(new ClusterNameModule(this.settings)); modules.add(new ThreadPoolModule(threadPool)); - modules.add(new TransportModule(this.settings)); modules.add(new SearchModule() { @Override protected void configure() { @@ -154,7 +148,6 @@ public class TransportClient extends AbstractClient { } }); modules.add(new ActionModule(true)); - modules.add(new ClientTransportModule()); modules.add(new CircuitBreakerModule(this.settings)); pluginsService.processModules(modules); diff --git a/core/src/main/java/org/elasticsearch/cluster/AbstractDiffable.java b/core/src/main/java/org/elasticsearch/cluster/AbstractDiffable.java index 4e6da2bd569..317faefa368 100644 --- a/core/src/main/java/org/elasticsearch/cluster/AbstractDiffable.java +++ b/core/src/main/java/org/elasticsearch/cluster/AbstractDiffable.java @@ -20,9 +20,9 @@ package org.elasticsearch.cluster; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.io.stream.StreamableReader; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.StreamableReader; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 12be047f17d..3ba01171dfc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -19,9 +19,6 @@ package org.elasticsearch.cluster; -import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction; @@ -29,7 +26,6 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateFilter; -import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService; import org.elasticsearch.cluster.metadata.MetaDataIndexAliasesService; @@ -60,7 +56,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocatio import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.InternalClusterService; -import 
org.elasticsearch.cluster.settings.ClusterDynamicSettings; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.AbstractModule; @@ -68,11 +63,9 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; -import org.elasticsearch.discovery.DiscoverySettings; -import org.elasticsearch.discovery.zen.ZenDiscovery; -import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.PrimaryShardAllocator; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.indexing.IndexingSlowLog; import org.elasticsearch.index.search.stats.SearchSlowLog; @@ -81,17 +74,10 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; -import org.elasticsearch.index.store.IndexStoreConfig; -import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndicesWarmer; -import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.ttl.IndicesTTLService; -import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.DefaultSearchContext; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import java.util.Arrays; import java.util.Collections; @@ -122,7 +108,6 @@ public class ClusterModule extends AbstractModule { SnapshotInProgressAllocationDecider.class)); private final Settings settings; - private final DynamicSettings.Builder clusterDynamicSettings = new DynamicSettings.Builder(); private final DynamicSettings.Builder indexDynamicSettings = new DynamicSettings.Builder(); private final ExtensionPoint.SelectedType shardsAllocators = new ExtensionPoint.SelectedType<>("shards_allocator", ShardsAllocator.class); private final ExtensionPoint.ClassSet allocationDeciders = new ExtensionPoint.ClassSet<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class); @@ -134,7 +119,6 @@ public class ClusterModule extends AbstractModule { public ClusterModule(Settings settings) { this.settings = settings; - registerBuiltinClusterSettings(); registerBuiltinIndexSettings(); for (Class decider : ClusterModule.DEFAULT_ALLOCATION_DECIDERS) { @@ -144,70 +128,10 @@ public class ClusterModule extends AbstractModule { registerShardsAllocator(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR, BalancedShardsAllocator.class); } - private void registerBuiltinClusterSettings() { - registerClusterDynamicSetting(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, Validator.EMPTY); - registerClusterDynamicSetting(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP + "*", Validator.EMPTY); - registerClusterDynamicSetting(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, Validator.FLOAT); - registerClusterDynamicSetting(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, Validator.FLOAT); - registerClusterDynamicSetting(BalancedShardsAllocator.SETTING_THRESHOLD, 
Validator.NON_NEGATIVE_FLOAT); - registerClusterDynamicSetting(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ALLOCATION_ALLOW_REBALANCE_VALIDATOR); - registerClusterDynamicSetting(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, Validator.INTEGER); - registerClusterDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY); - registerClusterDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, Validator.EMPTY); - registerClusterDynamicSetting(ZenDiscovery.SETTING_REJOIN_ON_MASTER_GONE, Validator.BOOLEAN); - registerClusterDynamicSetting(DiscoverySettings.NO_MASTER_BLOCK, Validator.EMPTY); - registerClusterDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP + "*", Validator.EMPTY); - registerClusterDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP + "*", Validator.EMPTY); - registerClusterDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP + "*", Validator.EMPTY); - registerClusterDynamicSetting(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE, Validator.EMPTY); - registerClusterDynamicSetting(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); - registerClusterDynamicSetting(IndicesTTLService.INDICES_TTL_INTERVAL, Validator.TIME); - registerClusterDynamicSetting(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT, Validator.TIME); - registerClusterDynamicSetting(MetaData.SETTING_READ_ONLY, Validator.EMPTY); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, Validator.POSITIVE_INTEGER); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, Validator.POSITIVE_INTEGER); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*", ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR); - registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, Validator.INTEGER); - registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, Validator.INTEGER); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, Validator.EMPTY); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, Validator.EMPTY); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, Validator.BOOLEAN); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, Validator.BOOLEAN); - registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, Validator.TIME_NON_NEGATIVE); - 
registerClusterDynamicSetting(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(SnapshotInProgressAllocationDecider.CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED, Validator.EMPTY); - registerClusterDynamicSetting(DestructiveOperations.REQUIRES_NAME, Validator.EMPTY); - registerClusterDynamicSetting(DiscoverySettings.PUBLISH_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(DiscoverySettings.PUBLISH_DIFF_ENABLE, Validator.BOOLEAN); - registerClusterDynamicSetting(DiscoverySettings.COMMIT_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, Validator.MEMORY_SIZE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, Validator.MEMORY_SIZE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, Validator.NON_NEGATIVE_DOUBLE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, Validator.MEMORY_SIZE); - registerClusterDynamicSetting(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, Validator.NON_NEGATIVE_DOUBLE); - registerClusterDynamicSetting(InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(SearchService.DEFAULT_SEARCH_TIMEOUT, Validator.TIMEOUT); - registerClusterDynamicSetting(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_VALIDATOR); - registerClusterDynamicSetting(TransportService.SETTING_TRACE_LOG_INCLUDE, Validator.EMPTY); - registerClusterDynamicSetting(TransportService.SETTING_TRACE_LOG_INCLUDE + ".*", Validator.EMPTY); - registerClusterDynamicSetting(TransportService.SETTING_TRACE_LOG_EXCLUDE, Validator.EMPTY); - registerClusterDynamicSetting(TransportService.SETTING_TRACE_LOG_EXCLUDE + ".*", Validator.EMPTY); - registerClusterDynamicSetting(TransportCloseIndexAction.SETTING_CLUSTER_INDICES_CLOSE_ENABLE, Validator.BOOLEAN); - registerClusterDynamicSetting(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE, Validator.INTEGER); - registerClusterDynamicSetting(TransportReplicationAction.SHARD_FAILURE_TIMEOUT, Validator.TIME_NON_NEGATIVE); - } - private void registerBuiltinIndexSettings() { registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_TYPE, Validator.EMPTY); - registerIndexDynamicSetting(MergeSchedulerConfig.MAX_THREAD_COUNT, Validator.EMPTY); + registerIndexDynamicSetting(MergeSchedulerConfig.MAX_THREAD_COUNT, Validator.NON_NEGATIVE_INTEGER); registerIndexDynamicSetting(MergeSchedulerConfig.MAX_MERGE_COUNT, Validator.EMPTY); registerIndexDynamicSetting(MergeSchedulerConfig.AUTO_THROTTLE, Validator.EMPTY); registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_REQUIRE_GROUP + "*", Validator.EMPTY); @@ -215,7 +139,6 @@ public class ClusterModule extends AbstractModule { registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "*", Validator.EMPTY); registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY); 
registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Validator.EMPTY); - registerIndexDynamicSetting(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, Validator.EMPTY); registerIndexDynamicSetting(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, Validator.NON_NEGATIVE_INTEGER); registerIndexDynamicSetting(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, Validator.EMPTY); registerIndexDynamicSetting(IndexMetaData.SETTING_READ_ONLY, Validator.EMPTY); @@ -227,7 +150,6 @@ public class ClusterModule extends AbstractModule { registerIndexDynamicSetting(IndicesTTLService.INDEX_TTL_DISABLE_PURGE, Validator.EMPTY); registerIndexDynamicSetting(IndexShard.INDEX_REFRESH_INTERVAL, Validator.TIME); registerIndexDynamicSetting(PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS, Validator.EMPTY); - registerIndexDynamicSetting(EngineConfig.INDEX_COMPOUND_ON_FLUSH, Validator.BOOLEAN); registerIndexDynamicSetting(EngineConfig.INDEX_GC_DELETES_SETTING, Validator.TIME); registerIndexDynamicSetting(IndexShard.INDEX_FLUSH_ON_CLOSE, Validator.BOOLEAN); registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN, Validator.TIME); @@ -256,13 +178,10 @@ public class ClusterModule extends AbstractModule { registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, Validator.DOUBLE_GTE_2); registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, Validator.NON_NEGATIVE_DOUBLE); registerIndexDynamicSetting(MergePolicyConfig.INDEX_COMPOUND_FORMAT, Validator.EMPTY); - registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, Validator.INTEGER); registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, Validator.BYTES_SIZE); - registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, Validator.EMPTY); - registerIndexDynamicSetting(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY); + registerIndexDynamicSetting(IndexSettings.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY); registerIndexDynamicSetting(IndicesWarmer.INDEX_WARMER_ENABLED, Validator.EMPTY); registerIndexDynamicSetting(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); - registerIndexDynamicSetting(IndicesRequestCache.DEPRECATED_INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); registerIndexDynamicSetting(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, Validator.TIME); registerIndexDynamicSetting(DefaultSearchContext.MAX_RESULT_WINDOW, Validator.POSITIVE_INTEGER); } @@ -271,9 +190,6 @@ public class ClusterModule extends AbstractModule { indexDynamicSettings.addSetting(setting, validator); } - public void registerClusterDynamicSetting(String setting, Validator validator) { - clusterDynamicSettings.addSetting(setting, validator); - } public void registerAllocationDecider(Class allocationDecider) { allocationDeciders.registerExtension(allocationDecider); @@ -289,7 +205,6 @@ public class ClusterModule extends AbstractModule { @Override protected void configure() { - bind(DynamicSettings.class).annotatedWith(ClusterDynamicSettings.class).toInstance(clusterDynamicSettings.build()); bind(DynamicSettings.class).annotatedWith(IndexDynamicSettings.class).toInstance(indexDynamicSettings.build()); // bind ShardsAllocator diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java index b682b0cc61d..12845fa3fa4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.TaskManager; import java.util.List; @@ -148,4 +149,9 @@ public interface ClusterService extends LifecycleComponent { * @return A zero time value if the queue is empty, otherwise the time value of the oldest task waiting in the queue */ TimeValue getMaxTaskWaitTime(); + + /** + * Returns the task manager created by the cluster service + */ + TaskManager getTaskManager(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index e20f21b4cec..dd8c737b6b0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -30,7 +30,12 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.Nullable; @@ -51,7 +56,11 @@ import org.elasticsearch.discovery.local.LocalDiscovery; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import java.io.IOException; -import java.util.*; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.Set; /** * Represents the current state of the cluster. 
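Stepping back from the raw hunks for a moment: the getTaskManager() accessor added above is the server-side anchor for the task-management plumbing threaded through ClusterAdminClient, Requests, and AbstractClient earlier in this patch. A rough sketch of how a caller might exercise that client surface, assuming a fully wired 2.x-era Client; the node id is a placeholder, and response handling is reduced to toString() since the ListTasksResponse accessors are not shown in this diff:

import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;

public class ListTasksSketch {
    /** Print the tasks currently running in the cluster, two equivalent ways. */
    static void dumpTasks(Client client) {
        // builder style, via the new prepareListTasks(String... nodesIds)
        ListTasksResponse viaBuilder = client.admin().cluster()
                .prepareListTasks("node-1") // hypothetical node id; passing no ids means all nodes
                .get();
        // request style, via the new Requests.listTasksRequest(String...)
        ListTasksResponse viaRequest = client.admin().cluster()
                .listTasks(Requests.listTasksRequest())
                .actionGet();
        System.out.println(viaBuilder);
        System.out.println(viaRequest);
    }
}

Both forms funnel into the same execute(ListTasksAction.INSTANCE, request) path shown in the AbstractClient hunk above.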
@@ -129,7 +138,7 @@ public class ClusterState implements ToXContent, Diffable { @SuppressWarnings("unchecked") T proto = (T)customPrototypes.get(type); if (proto == null) { - throw new IllegalArgumentException("No custom state prototype registered for type [" + type + "]"); + throw new IllegalArgumentException("No custom state prototype registered for type [" + type + "], node likely missing plugins"); } return proto; } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java index a035cf7c368..df857623570 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java @@ -50,7 +50,7 @@ public class ClusterStateObserver { final AtomicReference lastObservedState; final TimeoutClusterStateListener clusterStateListener = new ObserverClusterStateListener(); // observingContext is not null when waiting on cluster state changes - final AtomicReference observingContext = new AtomicReference(null); + final AtomicReference observingContext = new AtomicReference<>(null); volatile Long startTimeNS; volatile boolean timedOut; @@ -117,7 +117,7 @@ public class ClusterStateObserver { if (timeOutValue != null) { long timeSinceStartMS = TimeValue.nsecToMSec(System.nanoTime() - startTimeNS); timeoutTimeLeftMS = timeOutValue.millis() - timeSinceStartMS; - if (timeoutTimeLeftMS <= 0l) { + if (timeoutTimeLeftMS <= 0L) { // things have timeout while we were busy -> notify logger.trace("observer timed out. notifying listener. timeout setting [{}], time since start [{}]", timeOutValue, new TimeValue(timeSinceStartMS)); // update to latest, in case people want to retry @@ -238,7 +238,7 @@ public class ClusterStateObserver { } } - public static interface Listener { + public interface Listener { /** called when a new state is observed */ void onNewClusterState(ClusterState state); @@ -256,15 +256,17 @@ public class ClusterStateObserver { * * @return true if newState should be accepted */ - public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, - ClusterState newState, ClusterState.ClusterStateStatus newStatus); + boolean apply(ClusterState previousState, + ClusterState.ClusterStateStatus previousStatus, + ClusterState newState, + ClusterState.ClusterStateStatus newStatus); /** * called to see whether a cluster change should be accepted * * @return true if changedEvent.state() should be accepted */ - public boolean apply(ClusterChangedEvent changedEvent); + boolean apply(ClusterChangedEvent changedEvent); } @@ -272,20 +274,14 @@ public class ClusterStateObserver { @Override public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, ClusterState newState, ClusterState.ClusterStateStatus newStatus) { - if (previousState != newState || previousStatus != newStatus) { - return validate(newState); - } - return false; + return (previousState != newState || previousStatus != newStatus) && validate(newState); } protected abstract boolean validate(ClusterState newState); @Override public boolean apply(ClusterChangedEvent changedEvent) { - if (changedEvent.previousState().version() != changedEvent.state().version()) { - return validate(changedEvent.state()); - } - return false; + return changedEvent.previousState().version() != changedEvent.state().version() && validate(changedEvent.state()); } } diff --git 
a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index ab85d9540f0..fb22c2ca368 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -37,6 +37,13 @@ public interface ClusterStateTaskExecutor { return true; } + /** + * Callback invoked after new cluster state is published. Note that + * this method is not invoked if the cluster state was not updated. + */ + default void clusterStatePublished(ClusterState newClusterState) { + } + /** * Represents the result of a batched execution of cluster state update tasks * @param the type of the cluster state update task diff --git a/core/src/main/java/org/elasticsearch/cluster/Diffable.java b/core/src/main/java/org/elasticsearch/cluster/Diffable.java index 7ce60047a2b..cdad098c38e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/Diffable.java +++ b/core/src/main/java/org/elasticsearch/cluster/Diffable.java @@ -19,8 +19,8 @@ package org.elasticsearch.cluster; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java index 1488f059437..fb9d7159105 100644 --- a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java +++ b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.cursors.IntCursor; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java b/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java index f191c202c45..fb5f2334969 100644 --- a/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java +++ b/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.io.stream.StreamInput; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 039868d16c4..925a5a12ed6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -37,11 +37,12 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import 
diff --git a/core/src/main/java/org/elasticsearch/cluster/Diffable.java b/core/src/main/java/org/elasticsearch/cluster/Diffable.java
index 7ce60047a2b..cdad098c38e 100644
--- a/core/src/main/java/org/elasticsearch/cluster/Diffable.java
+++ b/core/src/main/java/org/elasticsearch/cluster/Diffable.java
@@ -19,8 +19,8 @@
 package org.elasticsearch.cluster;

-import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.Writeable;

 import java.io.IOException;
diff --git a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java
index 1488f059437..fb9d7159105 100644
--- a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java
+++ b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.cursors.IntCursor;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
 import org.elasticsearch.common.collect.ImmutableOpenIntMap;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.io.stream.StreamInput;
diff --git a/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java b/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java
index f191c202c45..fb5f2334969 100644
--- a/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java
+++ b/core/src/main/java/org/elasticsearch/cluster/IncompatibleClusterStateVersionException.java
@@ -20,7 +20,6 @@ package org.elasticsearch.cluster;

 import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.common.io.stream.StreamInput;

 import java.io.IOException;
diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java
index 039868d16c4..925a5a12ed6 100644
--- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java
@@ -37,11 +37,12 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.monitor.fs.FsInfo;
-import org.elasticsearch.node.settings.NodeSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.ReceiveTimeoutTransportException;

@@ -63,8 +64,8 @@ import java.util.concurrent.TimeUnit;
  */
 public class InternalClusterInfoService extends AbstractComponent implements ClusterInfoService, LocalNodeMasterListener, ClusterStateListener {

-    public static final String INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL = "cluster.info.update.interval";
-    public static final String INTERNAL_CLUSTER_INFO_TIMEOUT = "cluster.info.update.timeout";
+    public static final Setting<TimeValue> INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING = Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(10), true, Setting.Scope.CLUSTER);
+    public static final Setting<TimeValue> INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING = Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), true, Setting.Scope.CLUSTER);

     private volatile TimeValue updateFrequency;

@@ -82,7 +83,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
     private final List<Listener> listeners = new CopyOnWriteArrayList<>();

     @Inject
-    public InternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService,
+    public InternalClusterInfoService(Settings settings, ClusterSettings clusterSettings,
                                       TransportNodesStatsAction transportNodesStatsAction,
                                       TransportIndicesStatsAction transportIndicesStatsAction,
                                       ClusterService clusterService, ThreadPool threadPool) {
@@ -95,10 +96,12 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
         this.transportIndicesStatsAction = transportIndicesStatsAction;
         this.clusterService = clusterService;
         this.threadPool = threadPool;
-        this.updateFrequency = settings.getAsTime(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, TimeValue.timeValueSeconds(30));
-        this.fetchTimeout = settings.getAsTime(INTERNAL_CLUSTER_INFO_TIMEOUT, TimeValue.timeValueSeconds(15));
-        this.enabled = settings.getAsBoolean(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true);
-        nodeSettingsService.addListener(new ApplySettings());
+        this.updateFrequency = INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.get(settings);
+        this.fetchTimeout = INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.get(settings);
+        this.enabled = DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings);
+        clusterSettings.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, this::setFetchTimeout);
+        clusterSettings.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, this::setUpdateFrequency);
+        clusterSettings.addSettingsUpdateConsumer(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled);

         // Add InternalClusterInfoService to listen for Master changes
         this.clusterService.add((LocalNodeMasterListener)this);
@@ -106,35 +109,16 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
         this.clusterService.add((ClusterStateListener)this);
     }

-    class ApplySettings implements NodeSettingsService.Listener {
-        @Override
-        public void onRefreshSettings(Settings settings) {
-            TimeValue newUpdateFrequency = settings.getAsTime(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, null);
-            // ClusterInfoService is only enabled if the DiskThresholdDecider is enabled
-            Boolean newEnabled = settings.getAsBoolean(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, null);
+    private void setEnabled(boolean enabled) {
+        this.enabled = enabled;
+    }

-            if (newUpdateFrequency != null) {
-                if (newUpdateFrequency.getMillis() < TimeValue.timeValueSeconds(10).getMillis()) {
-                    logger.warn("[{}] set too low [{}] (< 10s)", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, newUpdateFrequency);
-                    throw new IllegalStateException("Unable to set " + INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL + " less than 10 seconds");
-                } else {
-                    logger.info("updating [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, updateFrequency, newUpdateFrequency);
-                    InternalClusterInfoService.this.updateFrequency = newUpdateFrequency;
-                }
-            }
+    private void setFetchTimeout(TimeValue fetchTimeout) {
+        this.fetchTimeout = fetchTimeout;
+    }

-            TimeValue newFetchTimeout = settings.getAsTime(INTERNAL_CLUSTER_INFO_TIMEOUT, null);
-            if (newFetchTimeout != null) {
-                logger.info("updating fetch timeout [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_TIMEOUT, fetchTimeout, newFetchTimeout);
-                InternalClusterInfoService.this.fetchTimeout = newFetchTimeout;
-            }
-
-
-            // We don't log about enabling it here, because the DiskThresholdDecider will already be logging about enable/disable
-            if (newEnabled != null) {
-                InternalClusterInfoService.this.enabled = newEnabled;
-            }
-        }
+    void setUpdateFrequency(TimeValue updateFrequency) {
+        this.updateFrequency = updateFrequency;
     }

     @Override
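Note how the removed ApplySettings listener above enforced the 10-second floor on cluster.info.update.interval by hand, with a warning log and an IllegalStateException, whereas the replacement encodes the floor declaratively in the Setting constant itself. A brief annotated sketch; the argument roles are inferred from the usage in this diff, not from the Setting javadoc:

---------------------------------------------------------------------------
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.unit.TimeValue;

// Argument roles as used by this change (inferred from the diff):
// key, default value, minimum value, dynamically updatable?, scope.
public class UpdateIntervalSettingSketch {
    public static final Setting<TimeValue> UPDATE_INTERVAL_SETTING =
            Setting.timeSetting(
                    "cluster.info.update.interval",  // key
                    TimeValue.timeValueSeconds(30),  // default (was the getAsTime fallback)
                    TimeValue.timeValueSeconds(10),  // minimum (was the hand-rolled "< 10s" check)
                    true,                            // dynamically updatable
                    Setting.Scope.CLUSTER);          // cluster-level scope
}
---------------------------------------------------------------------------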
diff --git a/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java
index dd7eb9f0c6d..a083476ea2f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java
+++ b/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.cluster;

 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
 import org.elasticsearch.cluster.ClusterState.Custom;
 import org.elasticsearch.cluster.metadata.SnapshotId;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
diff --git a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
index 6ee8365d378..9a112613b1d 100644
--- a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
+++ b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
@@ -22,7 +22,6 @@ package org.elasticsearch.cluster;

 import com.carrotsearch.hppc.ObjectContainer;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
 import org.elasticsearch.cluster.ClusterState.Custom;
 import org.elasticsearch.cluster.metadata.SnapshotId;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
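The same string-key-to-typed-Setting migration recurs in MappingUpdatedAction just below, so it is worth isolating the idiom once: a Setting constant typed by its value, an initial read via get(settings), and a method reference registered as the dynamic-update consumer in place of a hand-written NodeSettingsService.Listener. A condensed sketch with a hypothetical component and setting key (my.component.timeout is made up):

---------------------------------------------------------------------------
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

// Illustrative only: shows the Setting/ClusterSettings idiom used throughout
// this commit; the component and the setting key are invented.
public class MyComponent extends AbstractComponent {

    // typed, dynamic, cluster-scoped setting with a default of 10s
    public static final Setting<TimeValue> MY_TIMEOUT_SETTING =
            Setting.positiveTimeSetting("my.component.timeout", TimeValue.timeValueSeconds(10), true, Setting.Scope.CLUSTER);

    private volatile TimeValue timeout;

    public MyComponent(Settings settings, ClusterSettings clusterSettings) {
        super(settings);
        // initial value from node settings
        this.timeout = MY_TIMEOUT_SETTING.get(settings);
        // dynamic updates arrive through the consumer instead of a listener class
        clusterSettings.addSettingsUpdateConsumer(MY_TIMEOUT_SETTING, this::setTimeout);
    }

    private void setTimeout(TimeValue timeout) {
        this.timeout = timeout;
    }
}
---------------------------------------------------------------------------

The design intent visible in this commit is that range checks and parsing live in the Setting itself, so a consumer like setTimeout only ever sees already-validated values.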
diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java
index e3925aa6f4e..9e57fe3a48a 100644
--- a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java
+++ b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java
@@ -26,11 +26,12 @@ import org.elasticsearch.client.Client;
 import org.elasticsearch.client.IndicesAdminClient;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.Mapping;
-import org.elasticsearch.node.settings.NodeSettingsService;

 import java.util.concurrent.TimeoutException;

@@ -40,30 +41,23 @@ import java.util.concurrent.TimeoutException;
  */
 public class MappingUpdatedAction extends AbstractComponent {

-    public static final String INDICES_MAPPING_DYNAMIC_TIMEOUT = "indices.mapping.dynamic_timeout";
+    public static final Setting<TimeValue> INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER);

     private IndicesAdminClient client;
     private volatile TimeValue dynamicMappingUpdateTimeout;

-    class ApplySettings implements NodeSettingsService.Listener {
-        @Override
-        public void onRefreshSettings(Settings settings) {
-            TimeValue current = MappingUpdatedAction.this.dynamicMappingUpdateTimeout;
-            TimeValue newValue = settings.getAsTime(INDICES_MAPPING_DYNAMIC_TIMEOUT, current);
-            if (!current.equals(newValue)) {
-                logger.info("updating " + INDICES_MAPPING_DYNAMIC_TIMEOUT + " from [{}] to [{}]", current, newValue);
-                MappingUpdatedAction.this.dynamicMappingUpdateTimeout = newValue;
-            }
-        }
+    @Inject
+    public MappingUpdatedAction(Settings settings, ClusterSettings clusterSettings) {
+        super(settings);
+        this.dynamicMappingUpdateTimeout = INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING.get(settings);
+        clusterSettings.addSettingsUpdateConsumer(INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, this::setDynamicMappingUpdateTimeout);
     }

-    @Inject
-    public MappingUpdatedAction(Settings settings, NodeSettingsService nodeSettingsService) {
-        super(settings);
-        this.dynamicMappingUpdateTimeout = settings.getAsTime(INDICES_MAPPING_DYNAMIC_TIMEOUT, TimeValue.timeValueSeconds(30));
-        nodeSettingsService.addListener(new ApplySettings());
+    private void setDynamicMappingUpdateTimeout(TimeValue dynamicMappingUpdateTimeout) {
+        this.dynamicMappingUpdateTimeout = dynamicMappingUpdateTimeout;
     }

+
     public void setClient(Client client) {
         this.client = client.admin().indices();
     }
diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java
index 4079f14abc7..d4f453530bc 100644
--- a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java
+++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java
@@ -33,7 +33,12 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.*;
+import org.elasticsearch.transport.EmptyTransportResponseHandler;
+import org.elasticsearch.transport.TransportChannel;
+import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.transport.TransportRequestHandler;
+import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportService;

 import java.io.IOException;
 import java.util.List;
@@ -191,4 +196,4 @@ public class NodeIndexDeletedAction extends AbstractComponent {
         nodeId = in.readString();
     }
 }
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java 
b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java index f8507e5b689..c7dddce36f7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java @@ -31,7 +31,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index d09df094a68..58b766e8d84 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -20,7 +20,12 @@ package org.elasticsearch.cluster.action.shard; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingService; @@ -34,128 +39,152 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry; -/** - * - */ public class ShardStateAction extends AbstractComponent { - public static final String SHARD_STARTED_ACTION_NAME = "internal:cluster/shard/started"; public static final String SHARD_FAILED_ACTION_NAME = "internal:cluster/shard/failure"; private final TransportService transportService; - private final ClusterService clusterService; - private final AllocationService allocationService; - private final RoutingService 
routingService;

     @Inject
     public ShardStateAction(Settings settings, ClusterService clusterService,
                             TransportService transportService, AllocationService allocationService,
                             RoutingService routingService) {
         super(settings);
-        this.clusterService = clusterService;
         this.transportService = transportService;
-        this.allocationService = allocationService;
-        this.routingService = routingService;
-        transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler());
-        transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler());
+        transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService, new ShardStartedClusterStateTaskExecutor(allocationService, logger), logger));
+        transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateTaskExecutor(allocationService, routingService, logger), logger));
     }

-    public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) {
-        shardFailed(shardRouting, indexUUID, message, failure, null, listener);
+    public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) {
+        shardFailed(clusterState, shardRouting, indexUUID, message, failure, null, listener);
     }

-    public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, TimeValue timeout, Listener listener) {
-        DiscoveryNode masterNode = clusterService.state().nodes().masterNode();
+    public void resendShardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) {
+        logger.trace("{} re-sending failed shard [{}], index UUID [{}], reason [{}]", failure, shardRouting.shardId(), shardRouting, indexUUID, message);
+        shardFailed(clusterState, shardRouting, indexUUID, message, failure, listener);
+    }
+
+    public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, TimeValue timeout, Listener listener) {
+        DiscoveryNode masterNode = clusterState.nodes().masterNode();
         if (masterNode == null) {
-            logger.warn("can't send shard failed for {}, no master known.", shardRouting);
+            logger.warn("{} no master known to fail shard [{}]", shardRouting.shardId(), shardRouting);
             listener.onShardFailedNoMaster();
             return;
         }
-        innerShardFailed(shardRouting, indexUUID, masterNode, message, failure, timeout, listener);
-    }
-
-    public void resendShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, @Nullable final Throwable failure, Listener listener) {
-        logger.trace("{} re-sending failed shard for {}, indexUUID [{}], reason [{}]", failure, shardRouting.shardId(), shardRouting, indexUUID, message);
-        innerShardFailed(shardRouting, indexUUID, masterNode, message, failure, null, listener);
-    }
-
-    private void innerShardFailed(final ShardRouting shardRouting, final String 
indexUUID, final DiscoveryNode masterNode, final String message, final Throwable failure, TimeValue timeout, Listener listener) { ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure); TransportRequestOptions options = TransportRequestOptions.EMPTY; if (timeout != null) { options = TransportRequestOptions.builder().withTimeout(timeout).build(); } transportService.sendRequest(masterNode, - SHARD_FAILED_ACTION_NAME, shardRoutingEntry, options, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { - @Override - public void handleResponse(TransportResponse.Empty response) { - listener.onSuccess(); - } + SHARD_FAILED_ACTION_NAME, shardRoutingEntry, options, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { + @Override + public void handleResponse(TransportResponse.Empty response) { + listener.onSuccess(); + } - @Override - public void handleException(TransportException exp) { - logger.warn("failed to send failed shard to {}", exp, masterNode); - listener.onShardFailedFailure(masterNode, exp); - } - }); + @Override + public void handleException(TransportException exp) { + logger.warn("{} unexpected failure while sending request to [{}] to fail shard [{}]", exp, shardRoutingEntry.shardRouting.shardId(), masterNode, shardRoutingEntry); + listener.onShardFailedFailure(masterNode, exp); + } + }); } - public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason) { - DiscoveryNode masterNode = clusterService.state().nodes().masterNode(); - if (masterNode == null) { - logger.warn("{} can't send shard started for {}, no master known.", shardRouting.shardId(), shardRouting); - return; + private static class ShardFailedTransportHandler implements TransportRequestHandler { + private final ClusterService clusterService; + private final ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor; + private final ESLogger logger; + + public ShardFailedTransportHandler(ClusterService clusterService, ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor, ESLogger logger) { + this.clusterService = clusterService; + this.shardFailedClusterStateTaskExecutor = shardFailedClusterStateTaskExecutor; + this.logger = logger; } - shardStarted(shardRouting, indexUUID, reason, masterNode); - } - public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String reason, final DiscoveryNode masterNode) { - ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, reason, null); - logger.debug("{} sending shard started for {}", shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); - transportService.sendRequest(masterNode, - SHARD_STARTED_ACTION_NAME, new ShardRoutingEntry(shardRouting, indexUUID, reason, null), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { + @Override + public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { + logger.warn("{} received shard failed for {}", request.failure, request.shardRouting.shardId(), request); + clusterService.submitStateUpdateTask( + "shard-failed (" + request.shardRouting + "), message [" + request.message + "]", + request, + ClusterStateTaskConfig.build(Priority.HIGH), + shardFailedClusterStateTaskExecutor, + new ClusterStateTaskListener() { @Override - public void handleException(TransportException exp) { - logger.warn("failed to send shard started to [{}]", exp, masterNode); + public void onFailure(String source, Throwable t) { + 
logger.error("{} unexpected failure while failing shard [{}]", t, request.shardRouting.shardId(), request.shardRouting); + try { + channel.sendResponse(t); + } catch (Throwable channelThrowable) { + logger.warn("{} failed to send failure [{}] while failing shard [{}]", channelThrowable, request.shardRouting.shardId(), t, request.shardRouting); + } } - }); + @Override + public void onNoLongerMaster(String source) { + logger.error("{} no longer master while failing shard [{}]", request.shardRouting.shardId(), request.shardRouting); + try { + channel.sendResponse(new NotMasterException(source)); + } catch (Throwable channelThrowable) { + logger.warn("{} failed to send no longer master while failing shard [{}]", channelThrowable, request.shardRouting.shardId(), request.shardRouting); + } + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + try { + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } catch (Throwable channelThrowable) { + logger.warn("{} failed to send response while failing shard [{}]", channelThrowable, request.shardRouting.shardId(), request.shardRouting); + } + } + } + ); + } } - private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new ShardFailedClusterStateHandler(); + private static class ShardFailedClusterStateTaskExecutor implements ClusterStateTaskExecutor { + private final AllocationService allocationService; + private final RoutingService routingService; + private final ESLogger logger; - private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry) { - logger.warn("{} received shard failed for {}", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); - clusterService.submitStateUpdateTask( - "shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", - shardRoutingEntry, - ClusterStateTaskConfig.build(Priority.HIGH), - shardFailedClusterStateHandler, - shardFailedClusterStateHandler); - } + public ShardFailedClusterStateTaskExecutor(AllocationService allocationService, RoutingService routingService, ESLogger logger) { + this.allocationService = allocationService; + this.routingService = routingService; + this.logger = logger; + } - class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { @Override public BatchResult execute(ClusterState currentState, List tasks) throws Exception { BatchResult.Builder batchResultBuilder = BatchResult.builder(); - List shardRoutingsToBeApplied = new ArrayList<>(tasks.size()); + List failedShards = new ArrayList<>(tasks.size()); for (ShardRoutingEntry task : tasks) { - shardRoutingsToBeApplied.add(new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure)); + failedShards.add(new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure)); } ClusterState maybeUpdatedState = currentState; try { - RoutingAllocation.Result result = allocationService.applyFailedShards(currentState, shardRoutingsToBeApplied); + RoutingAllocation.Result result = allocationService.applyFailedShards(currentState, failedShards); if (result.changed()) { maybeUpdatedState = ClusterState.builder(currentState).routingResult(result).build(); } @@ -167,34 +196,68 @@ public class ShardStateAction extends AbstractComponent { } @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - if (oldState != newState && 
newState.getRoutingNodes().unassigned().size() > 0) { - logger.trace("unassigned shards after shard failures. scheduling a reroute."); - routingService.reroute("unassigned shards after shard failures, scheduling a reroute"); + public void clusterStatePublished(ClusterState newClusterState) { + int numberOfUnassignedShards = newClusterState.getRoutingNodes().unassigned().size(); + if (numberOfUnassignedShards > 0) { + String reason = String.format(Locale.ROOT, "[%d] unassigned shards after failing shards", numberOfUnassignedShards); + if (logger.isTraceEnabled()) { + logger.trace(reason + ", scheduling a reroute"); } + routingService.reroute(reason); + } + } + } + + public void shardStarted(final ClusterState clusterState, final ShardRouting shardRouting, String indexUUID, final String reason) { + DiscoveryNode masterNode = clusterState.nodes().masterNode(); + if (masterNode == null) { + logger.warn("{} no master known to start shard [{}]", shardRouting.shardId(), shardRouting); + return; + } + ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, reason, null); + logger.debug("sending start shard [{}]", shardRoutingEntry); + transportService.sendRequest(masterNode, + SHARD_STARTED_ACTION_NAME, new ShardRoutingEntry(shardRouting, indexUUID, reason, null), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { + @Override + public void handleException(TransportException exp) { + logger.warn("{} failure sending start shard [{}] to [{}]", exp, shardRouting.shardId(), masterNode, shardRouting); + } + }); + } + + private static class ShardStartedTransportHandler implements TransportRequestHandler { + private final ClusterService clusterService; + private final ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor; + private final ESLogger logger; + + public ShardStartedTransportHandler(ClusterService clusterService, ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor, ESLogger logger) { + this.clusterService = clusterService; + this.shardStartedClusterStateTaskExecutor = shardStartedClusterStateTaskExecutor; + this.logger = logger; } @Override - public void onFailure(String source, Throwable t) { - logger.error("unexpected failure during [{}]", t, source); + public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { + logger.debug("{} received shard started for [{}]", request.shardRouting.shardId(), request); + clusterService.submitStateUpdateTask( + "shard-started (" + request.shardRouting + "), reason [" + request.message + "]", + request, + ClusterStateTaskConfig.build(Priority.URGENT), + shardStartedClusterStateTaskExecutor, + shardStartedClusterStateTaskExecutor); + channel.sendResponse(TransportResponse.Empty.INSTANCE); } } - private final ShardStartedClusterStateHandler shardStartedClusterStateHandler = - new ShardStartedClusterStateHandler(); + private static class ShardStartedClusterStateTaskExecutor implements ClusterStateTaskExecutor, ClusterStateTaskListener { + private final AllocationService allocationService; + private final ESLogger logger; - private void shardStartedOnMaster(final ShardRoutingEntry shardRoutingEntry) { - logger.debug("received shard started for {}", shardRoutingEntry); + public ShardStartedClusterStateTaskExecutor(AllocationService allocationService, ESLogger logger) { + this.allocationService = allocationService; + this.logger = logger; + } - clusterService.submitStateUpdateTask( - "shard-started (" + shardRoutingEntry.shardRouting + "), reason 
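Stepping back from the transport-handler plumbing: the heart of ShardFailedClusterStateTaskExecutor is that a whole batch of shard-failed tasks is folded into one allocation round, producing at most one new cluster state per batch. A condensed, hedged paraphrase of that core logic follows; it is not the repository code, and it glosses over batch bookkeeping and error handling (FailedShardTask here is a stand-in for ShardRoutingEntry, whose fields are package-private):

---------------------------------------------------------------------------
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;

// Paraphrase of ShardFailedClusterStateTaskExecutor#execute: translate every
// task in the batch into a FailedShard, apply them in a single allocation
// pass, and only build a new ClusterState if routing actually changed.
final class BatchedShardFailureSketch {

    // minimal stand-in for the fields this sketch needs from ShardRoutingEntry
    static final class FailedShardTask {
        final ShardRouting shardRouting;
        final String message;
        final Throwable failure;

        FailedShardTask(ShardRouting shardRouting, String message, Throwable failure) {
            this.shardRouting = shardRouting;
            this.message = message;
            this.failure = failure;
        }
    }

    static ClusterState applyBatch(AllocationService allocationService, ClusterState currentState, List<FailedShardTask> tasks) {
        List<FailedRerouteAllocation.FailedShard> failedShards = new ArrayList<>(tasks.size());
        for (FailedShardTask task : tasks) {
            failedShards.add(new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure));
        }
        RoutingAllocation.Result result = allocationService.applyFailedShards(currentState, failedShards);
        return result.changed() ? ClusterState.builder(currentState).routingResult(result).build() : currentState;
    }
}
---------------------------------------------------------------------------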
[" + shardRoutingEntry.message + "]", - shardRoutingEntry, - ClusterStateTaskConfig.build(Priority.URGENT), - shardStartedClusterStateHandler, - shardStartedClusterStateHandler); - } - - class ShardStartedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { @Override public BatchResult execute(ClusterState currentState, List tasks) throws Exception { BatchResult.Builder builder = BatchResult.builder(); @@ -223,26 +286,7 @@ public class ShardStateAction extends AbstractComponent { } } - private class ShardFailedTransportHandler implements TransportRequestHandler { - - @Override - public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { - handleShardFailureOnMaster(request); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - } - - class ShardStartedTransportHandler implements TransportRequestHandler { - - @Override - public void messageReceived(ShardRoutingEntry request, TransportChannel channel) throws Exception { - shardStartedOnMaster(request); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - } - public static class ShardRoutingEntry extends TransportRequest { - ShardRouting shardRouting; String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE; String message; @@ -283,8 +327,13 @@ public class ShardStateAction extends AbstractComponent { } public interface Listener { - default void onSuccess() {} - default void onShardFailedNoMaster() {} - default void onShardFailedFailure(final DiscoveryNode master, final TransportException e) {} + default void onSuccess() { + } + + default void onShardFailedNoMaster() { + } + + default void onShardFailedFailure(final DiscoveryNode master, final TransportException e) { + } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java b/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java index 3b12d874ada..d66a2437ef2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java +++ b/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java @@ -32,7 +32,11 @@ import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import static org.elasticsearch.cluster.health.ClusterIndexHealth.readClusterIndexHealth; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 669d71477ca..af98d9c2fde 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -39,7 +39,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.search.warmer.IndexWarmersMetaData; @@ -58,7 +62,9 @@ import java.util.Set; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; -import static org.elasticsearch.common.settings.Settings.*; +import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; /** * @@ -621,7 +627,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild public int numberOfReplicas() { return settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1); } - + public Builder creationDate(long creationDate) { settings = settingsBuilder().put(settings).put(SETTING_CREATION_DATE, creationDate).build(); return this; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 4e3c19430e9..a26e95c40e0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -50,92 +50,20 @@ public class MappingMetaData extends AbstractDiffable { public static final MappingMetaData PROTO = new MappingMetaData(); - public static class Id { - - public static final Id EMPTY = new Id(null); - - private final String path; - - private final String[] pathElements; - - public Id(String path) { - this.path = path; - if (path == null) { - pathElements = Strings.EMPTY_ARRAY; - } else { - pathElements = Strings.delimitedListToStringArray(path, "."); - } - } - - public boolean hasPath() { - return path != null; - } - - public String path() { - return this.path; - } - - public String[] pathElements() { - return this.pathElements; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Id id = (Id) o; - - if (path != null ? !path.equals(id.path) : id.path != null) return false; - if (!Arrays.equals(pathElements, id.pathElements)) return false; - - return true; - } - - @Override - public int hashCode() { - int result = path != null ? path.hashCode() : 0; - result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0); - return result; - } - } - public static class Routing { - public static final Routing EMPTY = new Routing(false, null); + public static final Routing EMPTY = new Routing(false); private final boolean required; - private final String path; - - private final String[] pathElements; - - public Routing(boolean required, String path) { + public Routing(boolean required) { this.required = required; - this.path = path; - if (path == null) { - pathElements = Strings.EMPTY_ARRAY; - } else { - pathElements = Strings.delimitedListToStringArray(path, "."); - } } public boolean required() { return required; } - public boolean hasPath() { - return path != null; - } - - public String path() { - return this.path; - } - - public String[] pathElements() { - return this.pathElements; - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -143,19 +71,12 @@ public class MappingMetaData extends AbstractDiffable { Routing routing = (Routing) o; - if (required != routing.required) return false; - if (path != null ? 
!path.equals(routing.path) : routing.path != null) return false; - if (!Arrays.equals(pathElements, routing.pathElements)) return false; - - return true; + return required == routing.required; } @Override public int hashCode() { - int result = (required ? 1 : 0); - result = 31 * result + (path != null ? path.hashCode() : 0); - result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0); - return result; + return getClass().hashCode() + (required ? 1 : 0); } } @@ -182,31 +103,21 @@ public class MappingMetaData extends AbstractDiffable { } - public static final Timestamp EMPTY = new Timestamp(false, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, + public static final Timestamp EMPTY = new Timestamp(false, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null); private final boolean enabled; - private final String path; - private final String format; - private final String[] pathElements; - private final FormatDateTimeFormatter dateTimeFormatter; private final String defaultTimestamp; private final Boolean ignoreMissing; - public Timestamp(boolean enabled, String path, String format, String defaultTimestamp, Boolean ignoreMissing) { + public Timestamp(boolean enabled, String format, String defaultTimestamp, Boolean ignoreMissing) { this.enabled = enabled; - this.path = path; - if (path == null) { - pathElements = Strings.EMPTY_ARRAY; - } else { - pathElements = Strings.delimitedListToStringArray(path, "."); - } this.format = format; this.dateTimeFormatter = Joda.forPattern(format); this.defaultTimestamp = defaultTimestamp; @@ -217,18 +128,6 @@ public class MappingMetaData extends AbstractDiffable { return enabled; } - public boolean hasPath() { - return path != null; - } - - public String path() { - return this.path; - } - - public String[] pathElements() { - return this.pathElements; - } - public String format() { return this.format; } @@ -258,10 +157,8 @@ public class MappingMetaData extends AbstractDiffable { if (enabled != timestamp.enabled) return false; if (format != null ? !format.equals(timestamp.format) : timestamp.format != null) return false; - if (path != null ? !path.equals(timestamp.path) : timestamp.path != null) return false; if (defaultTimestamp != null ? !defaultTimestamp.equals(timestamp.defaultTimestamp) : timestamp.defaultTimestamp != null) return false; if (ignoreMissing != null ? !ignoreMissing.equals(timestamp.ignoreMissing) : timestamp.ignoreMissing != null) return false; - if (!Arrays.equals(pathElements, timestamp.pathElements)) return false; return true; } @@ -269,9 +166,7 @@ public class MappingMetaData extends AbstractDiffable { @Override public int hashCode() { int result = (enabled ? 1 : 0); - result = 31 * result + (path != null ? path.hashCode() : 0); result = 31 * result + (format != null ? format.hashCode() : 0); - result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0); result = 31 * result + (dateTimeFormatter != null ? dateTimeFormatter.hashCode() : 0); result = 31 * result + (defaultTimestamp != null ? defaultTimestamp.hashCode() : 0); result = 31 * result + (ignoreMissing != null ? 
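With the path field gone, the slimmed-down Routing (and likewise Timestamp) carries far less state, and equality for Routing reduces to the required flag. A tiny illustrative check of that contract, assuming the public Routing(boolean) constructor shown above:

---------------------------------------------------------------------------
import org.elasticsearch.cluster.metadata.MappingMetaData;

// Illustrative only: with path support removed, two Routing instances are
// equal (and hash identically) exactly when their "required" flags match.
public class RoutingEqualitySketch {
    public static void main(String[] args) {
        MappingMetaData.Routing a = new MappingMetaData.Routing(true);
        MappingMetaData.Routing b = new MappingMetaData.Routing(true);
        if (!a.equals(b) || a.hashCode() != b.hashCode()) {
            throw new AssertionError("equal required flags must mean equal values");
        }
        if (a.equals(new MappingMetaData.Routing(false))) {
            throw new AssertionError("different required flags must differ");
        }
        System.out.println("Routing equality depends only on required()");
    }
}
---------------------------------------------------------------------------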
ignoreMissing.hashCode() : 0); @@ -283,7 +178,6 @@ public class MappingMetaData extends AbstractDiffable { private final CompressedXContent source; - private Id id; private Routing routing; private Timestamp timestamp; private boolean hasParentField; @@ -291,9 +185,8 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData(DocumentMapper docMapper) { this.type = docMapper.type(); this.source = docMapper.mappingSource(); - this.id = new Id(docMapper.idFieldMapper().path()); - this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path()); - this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(), + this.routing = new Routing(docMapper.routingFieldMapper().required()); + this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(), docMapper.timestampFieldMapper().ignoreMissing()); this.hasParentField = docMapper.parentFieldMapper().active(); @@ -337,40 +230,22 @@ public class MappingMetaData extends AbstractDiffable { } private void initMappers(Map withoutType) { - if (withoutType.containsKey("_id")) { - String path = null; - Map routingNode = (Map) withoutType.get("_id"); - for (Map.Entry entry : routingNode.entrySet()) { - String fieldName = Strings.toUnderscoreCase(entry.getKey()); - Object fieldNode = entry.getValue(); - if (fieldName.equals("path")) { - path = fieldNode.toString(); - } - } - this.id = new Id(path); - } else { - this.id = Id.EMPTY; - } if (withoutType.containsKey("_routing")) { boolean required = false; - String path = null; Map routingNode = (Map) withoutType.get("_routing"); for (Map.Entry entry : routingNode.entrySet()) { String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (fieldName.equals("required")) { required = nodeBooleanValue(fieldNode); - } else if (fieldName.equals("path")) { - path = fieldNode.toString(); } } - this.routing = new Routing(required, path); + this.routing = new Routing(required); } else { this.routing = Routing.EMPTY; } if (withoutType.containsKey("_timestamp")) { boolean enabled = false; - String path = null; String format = TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT; String defaultTimestamp = TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP; Boolean ignoreMissing = null; @@ -380,8 +255,6 @@ public class MappingMetaData extends AbstractDiffable { Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { enabled = nodeBooleanValue(fieldNode); - } else if (fieldName.equals("path")) { - path = fieldNode.toString(); } else if (fieldName.equals("format")) { format = fieldNode.toString(); } else if (fieldName.equals("default") && fieldNode != null) { @@ -390,7 +263,7 @@ public class MappingMetaData extends AbstractDiffable { ignoreMissing = nodeBooleanValue(fieldNode); } } - this.timestamp = new Timestamp(enabled, path, format, defaultTimestamp, ignoreMissing); + this.timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); } else { this.timestamp = Timestamp.EMPTY; } @@ -401,19 +274,15 @@ public class MappingMetaData extends AbstractDiffable { } } - public MappingMetaData(String type, CompressedXContent source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) { + public MappingMetaData(String type, CompressedXContent source, Routing routing, Timestamp timestamp, boolean 
hasParentField) { this.type = type; this.source = source; - this.id = id; this.routing = routing; this.timestamp = timestamp; this.hasParentField = hasParentField; } void updateDefaultMapping(MappingMetaData defaultMapping) { - if (id == Id.EMPTY) { - id = defaultMapping.id(); - } if (routing == Routing.EMPTY) { routing = defaultMapping.routing(); } @@ -453,10 +322,6 @@ public class MappingMetaData extends AbstractDiffable { return sourceAsMap(); } - public Id id() { - return this.id; - } - public Routing routing() { return this.routing; } @@ -465,114 +330,14 @@ public class MappingMetaData extends AbstractDiffable { return this.timestamp; } - public ParseContext createParseContext(@Nullable String id, @Nullable String routing, @Nullable String timestamp) { - // We parse the routing even if there is already a routing key in the request in order to make sure that - // they are the same - return new ParseContext( - id == null && id().hasPath(), - routing().hasPath(), - timestamp == null && timestamp().hasPath() - ); - } - - public void parse(XContentParser parser, ParseContext parseContext) throws IOException { - innerParse(parser, parseContext); - } - - private void innerParse(XContentParser parser, ParseContext context) throws IOException { - if (!context.parsingStillNeeded()) { - return; - } - - XContentParser.Token token = parser.currentToken(); - if (token == null) { - token = parser.nextToken(); - } - if (token == XContentParser.Token.START_OBJECT) { - token = parser.nextToken(); - } - String idPart = context.idParsingStillNeeded() ? id().pathElements()[context.locationId] : null; - String routingPart = context.routingParsingStillNeeded() ? routing().pathElements()[context.locationRouting] : null; - String timestampPart = context.timestampParsingStillNeeded() ? timestamp().pathElements()[context.locationTimestamp] : null; - - for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) { - // Must point to field name - String fieldName = parser.currentName(); - // And then the value... - token = parser.nextToken(); - boolean incLocationId = false; - boolean incLocationRouting = false; - boolean incLocationTimestamp = false; - if (context.idParsingStillNeeded() && fieldName.equals(idPart)) { - if (context.locationId + 1 == id.pathElements().length) { - if (!token.isValue()) { - throw new MapperParsingException("id field must be a value but was either an object or an array"); - } - context.id = parser.textOrNull(); - context.idResolved = true; - } else { - incLocationId = true; - } - } - if (context.routingParsingStillNeeded() && fieldName.equals(routingPart)) { - if (context.locationRouting + 1 == routing.pathElements().length) { - context.routing = parser.textOrNull(); - context.routingResolved = true; - } else { - incLocationRouting = true; - } - } - if (context.timestampParsingStillNeeded() && fieldName.equals(timestampPart)) { - if (context.locationTimestamp + 1 == timestamp.pathElements().length) { - context.timestamp = parser.textOrNull(); - context.timestampResolved = true; - } else { - incLocationTimestamp = true; - } - } - - if (incLocationId || incLocationRouting || incLocationTimestamp) { - if (token == XContentParser.Token.START_OBJECT) { - context.locationId += incLocationId ? 1 : 0; - context.locationRouting += incLocationRouting ? 1 : 0; - context.locationTimestamp += incLocationTimestamp ? 1 : 0; - innerParse(parser, context); - context.locationId -= incLocationId ? 1 : 0; - context.locationRouting -= incLocationRouting ? 
1 : 0; - context.locationTimestamp -= incLocationTimestamp ? 1 : 0; - } - } else { - parser.skipChildren(); - } - - if (!context.parsingStillNeeded()) { - return; - } - } - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(type()); source().writeTo(out); - // id - if (id().hasPath()) { - out.writeBoolean(true); - out.writeString(id().path()); - } else { - out.writeBoolean(false); - } // routing out.writeBoolean(routing().required()); - if (routing().hasPath()) { - out.writeBoolean(true); - out.writeString(routing().path()); - } else { - out.writeBoolean(false); - } // timestamp out.writeBoolean(timestamp().enabled()); - out.writeOptionalString(timestamp().path()); out.writeString(timestamp().format()); out.writeOptionalString(timestamp().defaultTimestamp()); out.writeOptionalBoolean(timestamp().ignoreMissing()); @@ -586,7 +351,6 @@ public class MappingMetaData extends AbstractDiffable { MappingMetaData that = (MappingMetaData) o; - if (!id.equals(that.id)) return false; if (!routing.equals(that.routing)) return false; if (!source.equals(that.source)) return false; if (!timestamp.equals(that.timestamp)) return false; @@ -599,7 +363,6 @@ public class MappingMetaData extends AbstractDiffable { public int hashCode() { int result = type.hashCode(); result = 31 * result + source.hashCode(); - result = 31 * result + id.hashCode(); result = 31 * result + routing.hashCode(); result = 31 * result + timestamp.hashCode(); return result; @@ -608,142 +371,20 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData readFrom(StreamInput in) throws IOException { String type = in.readString(); CompressedXContent source = CompressedXContent.readCompressedString(in); - // id - Id id = new Id(in.readBoolean() ? in.readString() : null); // routing - Routing routing = new Routing(in.readBoolean(), in.readBoolean() ? in.readString() : null); + Routing routing = new Routing(in.readBoolean()); // timestamp boolean enabled = in.readBoolean(); - String path = in.readOptionalString(); String format = in.readString(); String defaultTimestamp = in.readOptionalString(); Boolean ignoreMissing = null; ignoreMissing = in.readOptionalBoolean(); - final Timestamp timestamp = new Timestamp(enabled, path, format, defaultTimestamp, ignoreMissing); + final Timestamp timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); final boolean hasParentField = in.readBoolean(); - return new MappingMetaData(type, source, id, routing, timestamp, hasParentField); + return new MappingMetaData(type, source, routing, timestamp, hasParentField); } - public static class ParseContext { - final boolean shouldParseId; - final boolean shouldParseRouting; - final boolean shouldParseTimestamp; - - int locationId = 0; - int locationRouting = 0; - int locationTimestamp = 0; - boolean idResolved; - boolean routingResolved; - boolean timestampResolved; - String id; - String routing; - String timestamp; - - public ParseContext(boolean shouldParseId, boolean shouldParseRouting, boolean shouldParseTimestamp) { - this.shouldParseId = shouldParseId; - this.shouldParseRouting = shouldParseRouting; - this.shouldParseTimestamp = shouldParseTimestamp; - } - - /** - * The id value parsed, null if does not require parsing, or not resolved. - */ - public String id() { - return id; - } - - /** - * Does id parsing really needed at all? - */ - public boolean shouldParseId() { - return shouldParseId; - } - - /** - * Has id been resolved during the parsing phase. 
- */ - public boolean idResolved() { - return idResolved; - } - - /** - * Is id parsing still needed? - */ - public boolean idParsingStillNeeded() { - return shouldParseId && !idResolved; - } - - /** - * The routing value parsed, null if does not require parsing, or not resolved. - */ - public String routing() { - return routing; - } - - /** - * Does routing parsing really needed at all? - */ - public boolean shouldParseRouting() { - return shouldParseRouting; - } - - /** - * Has routing been resolved during the parsing phase. - */ - public boolean routingResolved() { - return routingResolved; - } - - /** - * Is routing parsing still needed? - */ - public boolean routingParsingStillNeeded() { - return shouldParseRouting && !routingResolved; - } - - /** - * The timestamp value parsed, null if does not require parsing, or not resolved. - */ - public String timestamp() { - return timestamp; - } - - /** - * Does timestamp parsing really needed at all? - */ - public boolean shouldParseTimestamp() { - return shouldParseTimestamp; - } - - /** - * Has timestamp been resolved during the parsing phase. - */ - public boolean timestampResolved() { - return timestampResolved; - } - - /** - * Is timestamp parsing still needed? - */ - public boolean timestampParsingStillNeeded() { - return shouldParseTimestamp && !timestampResolved; - } - - /** - * Do we really need parsing? - */ - public boolean shouldParse() { - return shouldParseId || shouldParseRouting || shouldParseTimestamp; - } - - /** - * Is parsing still needed? - */ - public boolean parsingStillNeeded() { - return idParsingStillNeeded() || routingParsingStillNeeded() || timestampParsingStillNeeded(); - } - } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 751f8a09ea5..d904a3ca3ea 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; @@ -40,8 +39,8 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.support.LoggerMessageFormat; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; @@ -134,13 +133,13 @@ public class MetaData implements Iterable, Diffable, Fr //noinspection unchecked T proto = (T) customPrototypes.get(type); if (proto == null) { - throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "]"); + throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "], node likely missing plugins"); } return proto; } - public static final String SETTING_READ_ONLY = "cluster.blocks.read_only"; + public static final Setting SETTING_READ_ONLY_SETTING = Setting.boolSetting("cluster.blocks.read_only", 
false, true, Setting.Scope.CLUSTER); public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); @@ -441,13 +440,19 @@ public class MetaData implements Iterable, Diffable, Fr */ // TODO: This can be moved to IndexNameExpressionResolver too, but this means that we will support wildcards and other expressions // in the index,bulk,update and delete apis. - public String resolveIndexRouting(@Nullable String routing, String aliasOrIndex) { + public String resolveIndexRouting(@Nullable String parent, @Nullable String routing, String aliasOrIndex) { if (aliasOrIndex == null) { + if (routing == null) { + return parent; + } return routing; } AliasOrIndex result = getAliasAndIndexLookup().get(aliasOrIndex); if (result == null || result.isAlias() == false) { + if (routing == null) { + return parent; + } return routing; } AliasOrIndex.Alias alias = (AliasOrIndex.Alias) result; @@ -461,17 +466,19 @@ public class MetaData implements Iterable, Diffable, Fr } AliasMetaData aliasMd = alias.getFirstAliasMetaData(); if (aliasMd.indexRouting() != null) { + if (aliasMd.indexRouting().indexOf(',') != -1) { + throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + aliasMd.getIndexRouting() + "] that resolved to several routing values, rejecting operation"); + } if (routing != null) { if (!routing.equals(aliasMd.indexRouting())) { throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has index routing associated with it [" + aliasMd.indexRouting() + "], and was provided with routing value [" + routing + "], rejecting operation"); } } - routing = aliasMd.indexRouting(); + // Alias routing overrides the parent routing (if any). + return aliasMd.indexRouting(); } - if (routing != null) { - if (routing.indexOf(',') != -1) { - throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + routing + "] that resolved to several routing values, rejecting operation"); - } + if (routing == null) { + return parent; } return routing; } @@ -745,23 +752,23 @@ public class MetaData implements Iterable, Diffable, Fr /** All known byte-sized cluster settings. */ public static final Set CLUSTER_BYTES_SIZE_SETTINGS = unmodifiableSet(newHashSet( - IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, - RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC)); + IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey())); /** All known time cluster settings. 
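The reworked resolveIndexRouting above takes a new parent parameter and gives it the lowest precedence: alias index routing wins outright (and must resolve to a single value), an explicit routing value comes next, and the parent routing is only a fallback. A standalone paraphrase of that precedence chain, as an illustrative helper rather than the MetaData method:

---------------------------------------------------------------------------
// Paraphrase of the new precedence: alias routing > explicit routing > parent.
// Exception messages are abbreviated relative to the real ones.
public class RoutingResolutionSketch {

    static String resolve(String aliasIndexRouting, String routing, String parent) {
        if (aliasIndexRouting != null) {
            if (aliasIndexRouting.indexOf(',') != -1) {
                throw new IllegalArgumentException("alias routing [" + aliasIndexRouting + "] resolves to several routing values");
            }
            if (routing != null && !routing.equals(aliasIndexRouting)) {
                throw new IllegalArgumentException("explicit routing [" + routing + "] conflicts with alias routing [" + aliasIndexRouting + "]");
            }
            return aliasIndexRouting; // alias routing also overrides any parent routing
        }
        return routing != null ? routing : parent;
    }

    public static void main(String[] args) {
        System.out.println(resolve(null, null, "p")); // p (parent is only a fallback)
        System.out.println(resolve(null, "r", "p")); // r (explicit routing beats parent)
        System.out.println(resolve("a", "a", "p"));  // a (alias routing wins when consistent)
    }
}
---------------------------------------------------------------------------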
*/ public static final Set CLUSTER_TIME_SETTINGS = unmodifiableSet(newHashSet( - IndicesTTLService.INDICES_TTL_INTERVAL, - RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, - RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, - RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, - RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, - RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, - DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, - InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, - InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, - DiscoverySettings.PUBLISH_TIMEOUT, - InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD)); + IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), + RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getKey(), + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(), + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(), + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.getKey(), + DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), + InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey())); /** As of 2.0 we require units for time and byte-sized settings. This methods adds default units to any cluster settings that don't * specify a unit. */ diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 96d378af042..b2c9e500f66 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -71,7 +70,14 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 71ef9c22c33..32a66bfb764 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -37,7 +37,11 @@ import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.indices.IndicesService; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * Service responsible for submitting add and remove aliases requests diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index 1fa1b702f66..df26df29800 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -29,25 +29,19 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException; import org.elasticsearch.rest.RestStatus; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Locale; /** * Service responsible for submitting open/close index requests @@ -92,14 +86,6 @@ public class MetaDataIndexStateService extends AbstractComponent { } if (indexMetaData.getState() != IndexMetaData.State.CLOSE) { - IndexRoutingTable indexRoutingTable = currentState.routingTable().index(index); - for (IndexShardRoutingTable shard : indexRoutingTable) { - for (ShardRouting shardRouting : shard) { - if (shardRouting.primary() == true && shardRouting.allocatedPostIndexCreate() == false) { - throw new IndexPrimaryShardNotAllocatedException(new Index(index)); - } - } - } indicesToClose.add(index); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 790cb99c64b..da2fc064dc4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -36,7 +36,13 @@ import org.elasticsearch.indices.IndexTemplateAlreadyExistsException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.InvalidIndexTemplateException; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; /** * Service responsible for submitting index templates updates diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index 00904af8915..7bd83b5ddd3 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.routing.UnassignedInfo; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 957125703b6..14f9f500c45 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -22,7 +22,11 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.AckedClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; @@ -37,13 +41,18 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.percolator.PercolatorService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Service responsible for submitting mapping changes */ @@ -237,8 +246,8 @@ public class MetaDataMappingService extends AbstractComponent { } private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest request) throws IOException { - Map newMappers = new HashMap<>(); - Map existingMappers = new HashMap<>(); + String mappingType = request.type(); + CompressedXContent mappingUpdateSource = new CompressedXContent(request.source()); for (String index : request.indices()) { IndexService indexService = indicesService.indexServiceSafe(index); // try and parse it (no need to add it here) so we can bail early in case of parsing exception @@ -246,16 +255,12 @@ public class MetaDataMappingService extends AbstractComponent { DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type()); if (MapperService.DEFAULT_MAPPING.equals(request.type())) { // _default_ types do not go through merging, but we do test the new settings. 
Also don't apply the old default - newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), false); + newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, false); } else { - newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null); + newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, existingMapper == null); if (existingMapper != null) { - // first, simulate - MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes()); - // if we have conflicts, throw an exception - if (mergeResult.hasConflicts()) { - throw new IllegalArgumentException("Merge failed with failures {" + Arrays.toString(mergeResult.buildConflicts()) + "}"); - } + // first, simulate: just call merge and ignore the result + existingMapper.merge(newMapper.mapping(), request.updateAllTypes()); } else { // TODO: can we find a better place for this validation? // The reason this validation is here is that the mapper service doesn't learn about @@ -274,36 +279,31 @@ public class MetaDataMappingService extends AbstractComponent { } } } - newMappers.put(index, newMapper); - if (existingMapper != null) { - existingMappers.put(index, existingMapper); + if (mappingType == null) { + mappingType = newMapper.type(); + } else if (mappingType.equals(newMapper.type()) == false) { + throw new InvalidTypeNameException("Type name provided does not match type name within mapping definition"); } } + assert mappingType != null; - String mappingType = request.type(); - if (mappingType == null) { - mappingType = newMappers.values().iterator().next().type(); - } else if (!mappingType.equals(newMappers.values().iterator().next().type())) { - throw new InvalidTypeNameException("Type name provided does not match type name within mapping definition"); - } if (!MapperService.DEFAULT_MAPPING.equals(mappingType) && !PercolatorService.TYPE_NAME.equals(mappingType) && mappingType.charAt(0) == '_') { throw new InvalidTypeNameException("Document mapping type name can't start with '_'"); } final Map mappings = new HashMap<>(); - for (Map.Entry entry : newMappers.entrySet()) { - String index = entry.getKey(); + for (String index : request.indices()) { // do the actual merge here on the master, and update the mapping source - DocumentMapper newMapper = entry.getValue(); IndexService indexService = indicesService.indexService(index); if (indexService == null) { continue; } CompressedXContent existingSource = null; - if (existingMappers.containsKey(entry.getKey())) { - existingSource = existingMappers.get(entry.getKey()).mappingSource(); + DocumentMapper existingMapper = indexService.mapperService().documentMapper(mappingType); + if (existingMapper != null) { + existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false, request.updateAllTypes()); + DocumentMapper mergedMapper = indexService.mapperService().merge(mappingType, mappingUpdateSource, true, request.updateAllTypes()); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { @@ -322,9 +322,9 @@ public class MetaDataMappingService extends AbstractComponent { } else { mappings.put(index, new MappingMetaData(mergedMapper)); if (logger.isDebugEnabled()) { - logger.debug("[{}] create_mapping [{}] with source [{}]", index, newMapper.type(), 
updatedSource); + logger.debug("[{}] create_mapping [{}] with source [{}]", index, mappingType, updatedSource); } else if (logger.isInfoEnabled()) { - logger.info("[{}] create_mapping [{}]", index, newMapper.type()); + logger.info("[{}] create_mapping [{}]", index, mappingType); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index eaa1eefd25e..35c9c51143f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -24,7 +24,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; @@ -40,7 +44,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.settings.IndexDynamicSettings; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.common.settings.Settings.settingsBuilder; diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 780f511d912..7dce2172879 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.node; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; @@ -46,12 +45,6 @@ import static org.elasticsearch.common.transport.TransportAddressSerializers.add */ public class DiscoveryNode implements Streamable, ToXContent { - /** - * Minimum version of a node to communicate with. This version corresponds to the minimum compatibility version - * of the current elasticsearch major version. - */ - public static final Version MINIMUM_DISCOVERY_NODE_VERSION = Version.CURRENT.minimumCompatibilityVersion(); - public static boolean localNode(Settings settings) { if (settings.get("node.local") != null) { return settings.getAsBoolean("node.local", false); @@ -110,7 +103,7 @@ public class DiscoveryNode implements Streamable, ToXContent { /** * Creates a new {@link DiscoveryNode} *

- * Note: if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used. + * Note: if the version of the node is unknown {@link Version#minimumCompatibilityVersion()} should be used for the current version. * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used * the node might not be able to communicate with the remote node. After initial handshakes node versions will be discovered * and updated. @@ -127,7 +120,7 @@ public class DiscoveryNode implements Streamable, ToXContent { /** * Creates a new {@link DiscoveryNode} *

- * Note: if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used. + * Note: if the version of the node is unknown {@link Version#minimumCompatibilityVersion()} should be used for the current version. * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used * the node might not be able to communicate with the remote node. After initial handshakes node versions will be discovered * and updated. @@ -146,7 +139,7 @@ public class DiscoveryNode implements Streamable, ToXContent { /** * Creates a new {@link DiscoveryNode}. *

- * Note: if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used. + * Note: if the version of the node is unknown {@link Version#minimumCompatibilityVersion()} should be used for the current version. * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used * the node might not be able to communicate with the remote node. After initial handshakes node versions will be discovered * and updated. @@ -179,7 +172,7 @@ public class DiscoveryNode implements Streamable, ToXContent { /** * Creates a new {@link DiscoveryNode}. *

- * Note: if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used. + * Note: if the version of the node is unknown {@link Version#minimumCompatibilityVersion()} should be used for the current version. * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used * the node might not be able to communicate with the remote node. After initial handshakes node versions will be discovered * and updated. diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 16b7e9e1a13..0bb64220f1f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.node; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Booleans; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java b/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java index f391bf3d667..9cf429383fd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java @@ -21,7 +21,8 @@ package org.elasticsearch.cluster.routing; import org.apache.lucene.util.CollectionUtil; -import java.util.*; +import java.util.Iterator; +import java.util.List; /** * This class implements a compilation of {@link ShardIterator}s.
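On the `DiscoveryNode` javadoc change above: with the `MINIMUM_DISCOVERY_NODE_VERSION` constant removed, a caller that sees a node of unknown version is expected to start from `Version.CURRENT.minimumCompatibilityVersion()` and let the handshake discover the real version later. A toy model of that fallback rule (the version strings here are invented for the demo and do not reflect the real release mapping):

```java
public class VersionFallbackSketch {
    static final String CURRENT = "3.0.0";

    // Assumption for the demo only: the current major can still talk to the
    // last minor line of the previous major.
    static String minimumCompatibilityVersion() {
        return "2.3.0";
    }

    /** Picks the version to record for a newly seen node. */
    static String versionForNewNode(String advertisedVersion) {
        // Unknown version: assume the lowest version we can still talk to;
        // the initial handshake will replace it with the real one.
        return advertisedVersion != null ? advertisedVersion : minimumCompatibilityVersion();
    }

    public static void main(String[] args) {
        System.out.println(versionForNewNode(null));    // 2.3.0
        System.out.println(versionForNewNode("3.0.0")); // 3.0.0
    }
}
```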
Each {@link ShardIterator} @@ -42,7 +43,7 @@ public class GroupShardsIterator implements Iterable { } /** - * Returns the total number of shards within all groups + * Returns the total number of shards within all groups * @return total number of shards */ public int totalSize() { @@ -55,7 +56,7 @@ public class GroupShardsIterator implements Iterable { /** * Returns the total number of shards plus the number of empty groups - * @return number of shards and empty groups + * @return number of shards and empty groups */ public int totalSizeWith1ForEmpty() { int size = 0; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index ca071c811e3..bcf489c6a2c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -32,7 +32,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; import java.util.concurrent.ThreadLocalRandom; /** diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 6512ee5cef7..d425b63b34c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -106,7 +106,6 @@ public class IndexShardRoutingTable implements Iterable { } } this.allShardsStarted = allShardsStarted; - this.primary = primary; if (primary != null) { this.primaryAsList = Collections.singletonList(primary); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java index 596bb97887c..ff6c8293420 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java @@ -20,9 +20,12 @@ package org.elasticsearch.cluster.routing; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.collect.Iterators; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; /** * A {@link RoutingNode} represents a cluster node associated with a single {@link DiscoveryNode} including all shards @@ -57,7 +60,7 @@ public class RoutingNode implements Iterable { /** * Returns the nodes {@link DiscoveryNode}. - * + * * @return discoveryNode of this node */ public DiscoveryNode node() { @@ -65,7 +68,7 @@ public class RoutingNode implements Iterable { } /** - * Get the id of this node + * Get the id of this node * @return id of the node */ public String nodeId() { @@ -93,7 +96,7 @@ public class RoutingNode implements Iterable { /** * Determine the number of shards with a specific state * @param states set of states which should be counted - * @return number of shards + * @return number of shards */ public int numberOfShardsWithState(ShardRoutingState... 
states) { int count = 0; @@ -110,7 +113,7 @@ public class RoutingNode implements Iterable { /** * Determine the shards with a specific state * @param states set of states which should be listed - * @return List of shards + * @return List of shards */ public List shardsWithState(ShardRoutingState... states) { List shards = new ArrayList<>(); @@ -125,7 +128,7 @@ public class RoutingNode implements Iterable { } /** - * Determine the shards of an index with a specific state + * Determine the shards of an index with a specific state * @param index id of the index * @param states set of states which should be listed * @return a list of shards diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 5d17a59339a..3a2567e3f46 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.routing; import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -31,7 +30,14 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.shard.ShardId; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.Predicate; /** @@ -63,6 +69,7 @@ public class RoutingNodes implements Iterable { private int relocatingShards = 0; private final Map> nodesPerAttributeNames = new HashMap<>(); + private final Map recoveryiesPerNode = new HashMap<>(); public RoutingNodes(ClusterState clusterState) { this(clusterState, true); @@ -78,50 +85,45 @@ public class RoutingNodes implements Iterable { Map> nodesToShards = new HashMap<>(); // fill in the nodeToShards with the "live" nodes for (ObjectCursor cursor : clusterState.nodes().dataNodes().values()) { - nodesToShards.put(cursor.value.id(), new ArrayList()); + nodesToShards.put(cursor.value.id(), new ArrayList<>()); } // fill in the inverse of node -> shards allocated // also fill replicaSet information for (ObjectCursor indexRoutingTable : routingTable.indicesRouting().values()) { for (IndexShardRoutingTable indexShard : indexRoutingTable.value) { + assert indexShard.primary != null; for (ShardRouting shard : indexShard) { // to get all the shards belonging to an index, including the replicas, // we define a replica set and keep track of it. A replica set is identified // by the ShardId, as this is common for primary and replicas. // A replica Set might have one (and not more) replicas with the state of RELOCATING. 
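The `RoutingNodes` constructor around this hunk inverts the routing table into a node-to-shards map, and the patch replaces the manual get/check-null/put sequence with `computeIfAbsent`. A self-contained illustration of that inversion idiom (the `Shard` type here is a simplified stand-in, not the real `ShardRouting`):

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class NodeShardsIndexSketch {
    static final class Shard {
        final String id;
        final String nodeId; // null when unassigned
        Shard(String id, String nodeId) { this.id = id; this.nodeId = nodeId; }
    }

    public static void main(String[] args) {
        List<Shard> table = new ArrayList<>();
        table.add(new Shard("idx[0]p", "node1"));
        table.add(new Shard("idx[0]r", "node2"));
        table.add(new Shard("idx[1]p", "node1"));

        // Invert shard -> node into node -> shards; computeIfAbsent replaces
        // the old "get, test for null, put" dance used before this patch.
        Map<String, List<Shard>> nodesToShards = new HashMap<>();
        for (Shard shard : table) {
            if (shard.nodeId != null) {
                nodesToShards.computeIfAbsent(shard.nodeId, k -> new ArrayList<>()).add(shard);
            }
        }
        System.out.println(nodesToShards.get("node1").size()); // 2
    }
}
```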
if (shard.assignedToNode()) { - List entries = nodesToShards.get(shard.currentNodeId()); - if (entries == null) { - entries = new ArrayList<>(); - nodesToShards.put(shard.currentNodeId(), entries); - } + List entries = nodesToShards.computeIfAbsent(shard.currentNodeId(), k -> new ArrayList<>()); final ShardRouting sr = getRouting(shard, readOnly); entries.add(sr); assignedShardsAdd(sr); if (shard.relocating()) { - entries = nodesToShards.get(shard.relocatingNodeId()); relocatingShards++; - if (entries == null) { - entries = new ArrayList<>(); - nodesToShards.put(shard.relocatingNodeId(), entries); - } + entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(), k -> new ArrayList<>()); // add the counterpart shard with relocatingNodeId reflecting the source from which // it's relocating from. ShardRouting targetShardRouting = shard.buildTargetRelocatingShard(); + addInitialRecovery(targetShardRouting); if (readOnly) { targetShardRouting.freeze(); } entries.add(targetShardRouting); assignedShardsAdd(targetShardRouting); - } else if (!shard.active()) { // shards that are initializing without being relocated + } else if (shard.active() == false) { // shards that are initializing without being relocated if (shard.primary()) { inactivePrimaryCount++; } inactiveShardCount++; + addInitialRecovery(shard); } } else { - final ShardRouting sr = getRouting(shard, readOnly); + final ShardRouting sr = getRouting(shard, readOnly); assignedShardsAdd(sr); unassignedShards.add(sr); } @@ -134,6 +136,79 @@ public class RoutingNodes implements Iterable { } } + private void addRecovery(ShardRouting routing) { + addRecovery(routing, true, false); + } + + private void removeRecovery(ShardRouting routing) { + addRecovery(routing, false, false); + } + + public void addInitialRecovery(ShardRouting routing) { + addRecovery(routing,true, true); + } + + private void addRecovery(final ShardRouting routing, final boolean increment, final boolean initializing) { + final int howMany = increment ? 
1 : -1; + assert routing.initializing() : "routing must be initializing: " + routing; + Recoveries.getOrAdd(recoveryiesPerNode, routing.currentNodeId()).addIncoming(howMany); + final String sourceNodeId; + if (routing.relocatingNodeId() != null) { // this is a relocation-target + sourceNodeId = routing.relocatingNodeId(); + if (routing.primary() && increment == false) { // primary is done relocating + int numRecoveringReplicas = 0; + for (ShardRouting assigned : assignedShards(routing)) { + if (assigned.primary() == false && assigned.initializing() && assigned.relocatingNodeId() == null) { + numRecoveringReplicas++; + } + } + // we transfer the recoveries to the relocated primary + recoveryiesPerNode.get(sourceNodeId).addOutgoing(-numRecoveringReplicas); + recoveryiesPerNode.get(routing.currentNodeId()).addOutgoing(numRecoveringReplicas); + } + } else if (routing.primary() == false) { // primary without relocationID is initial recovery + ShardRouting primary = findPrimary(routing); + if (primary == null && initializing) { + primary = routingTable.index(routing.index()).shard(routing.shardId().id()).primary; + } else if (primary == null) { + throw new IllegalStateException("replica is initializing but primary is unassigned"); + } + sourceNodeId = primary.currentNodeId(); + } else { + sourceNodeId = null; + } + if (sourceNodeId != null) { + Recoveries.getOrAdd(recoveryiesPerNode, sourceNodeId).addOutgoing(howMany); + } + } + + public int getIncomingRecoveries(String nodeId) { + return recoveryiesPerNode.getOrDefault(nodeId, Recoveries.EMPTY).getIncoming(); + } + + public int getOutgoingRecoveries(String nodeId) { + return recoveryiesPerNode.getOrDefault(nodeId, Recoveries.EMPTY).getOutgoing(); + } + + private ShardRouting findPrimary(ShardRouting routing) { + List shardRoutings = assignedShards.get(routing.shardId()); + ShardRouting primary = null; + if (shardRoutings != null) { + for (ShardRouting shardRouting : shardRoutings) { + if (shardRouting.primary()) { + if (shardRouting.active()) { + return shardRouting; + } else if (primary == null) { + primary = shardRouting; + } else if (primary.relocatingNodeId() != null) { + primary = shardRouting; + } + } + } + } + return primary; + } + private static ShardRouting getRouting(ShardRouting src, boolean readOnly) { if (readOnly) { src.freeze(); // we just freeze and reuse this instance if we are read only @@ -354,6 +429,7 @@ public class RoutingNodes implements Iterable { if (shard.primary()) { inactivePrimaryCount++; } + addRecovery(shard); assignedShardsAdd(shard); } @@ -369,6 +445,7 @@ public class RoutingNodes implements Iterable { ShardRouting target = shard.buildTargetRelocatingShard(); node(target.currentNodeId()).add(target); assignedShardsAdd(target); + addRecovery(target); return target; } @@ -385,9 +462,12 @@ public class RoutingNodes implements Iterable { inactivePrimaryCount--; } } + removeRecovery(shard); shard.moveToStarted(); } + + /** * Cancels a relocation of a shard that shard must relocating. 
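The recovery bookkeeping added above maintains, per node, how many shard recoveries are incoming (the node is the target of an initializing shard) and outgoing (the node is the relocation source, or the active primary a replica recovers from); `moveToStarted` then decrements the counters via `removeRecovery`. A simplified, self-contained sketch of those paired counters (class and method names invented for the demo):

```java
import java.util.HashMap;
import java.util.Map;

public class RecoveryCountersSketch {
    static final class Counters {
        int incoming, outgoing;
        @Override public String toString() { return "in=" + incoming + " out=" + outgoing; }
    }

    private final Map<String, Counters> perNode = new HashMap<>();

    private Counters getOrAdd(String nodeId) {
        return perNode.computeIfAbsent(nodeId, k -> new Counters());
    }

    /** An initializing shard counts against its target node and, if known, its source node. */
    void startRecovery(String targetNodeId, String sourceNodeId) {
        getOrAdd(targetNodeId).incoming++;
        if (sourceNodeId != null) {
            getOrAdd(sourceNodeId).outgoing++;
        }
    }

    void finishRecovery(String targetNodeId, String sourceNodeId) {
        getOrAdd(targetNodeId).incoming--;
        if (sourceNodeId != null) {
            getOrAdd(sourceNodeId).outgoing--;
        }
    }

    public static void main(String[] args) {
        RecoveryCountersSketch counters = new RecoveryCountersSketch();
        counters.startRecovery("node2", "node1"); // replica recovering from the primary on node1
        System.out.println(counters.perNode);     // e.g. {node1=in=0 out=1, node2=in=1 out=0}
        counters.finishRecovery("node2", "node1");
        System.out.println(counters.perNode);     // counters return to zero
    }
}
```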
*/ @@ -442,6 +522,9 @@ public class RoutingNodes implements Iterable { cancelRelocation(shard); } assignedShardsRemove(shard); + if (shard.initializing()) { + removeRecovery(shard); + } } private void assignedShardsAdd(ShardRouting shard) { @@ -449,12 +532,8 @@ public class RoutingNodes implements Iterable { // no unassigned return; } - List shards = assignedShards.get(shard.shardId()); - if (shards == null) { - shards = new ArrayList<>(); - assignedShards.put(shard.shardId(), shards); - } - assert assertInstanceNotInList(shard, shards); + List shards = assignedShards.computeIfAbsent(shard.shardId(), k -> new ArrayList<>()); + assert assertInstanceNotInList(shard, shards); shards.add(shard); } @@ -755,6 +834,34 @@ public class RoutingNodes implements Iterable { } } + for (Map.Entry recoveries : routingNodes.recoveryiesPerNode.entrySet()) { + String node = recoveries.getKey(); + final Recoveries value = recoveries.getValue(); + int incoming = 0; + int outgoing = 0; + RoutingNode routingNode = routingNodes.nodesToShards.get(node); + if (routingNode != null) { // node might have dropped out of the cluster + for (ShardRouting routing : routingNode) { + if (routing.initializing()) { + incoming++; + } else if (routing.relocating()) { + outgoing++; + } + if (routing.primary() && (routing.initializing() && routing.relocatingNodeId() != null) == false) { // we don't count the initialization end of the primary relocation + List shardRoutings = routingNodes.assignedShards.get(routing.shardId()); + for (ShardRouting assigned : shardRoutings) { + if (assigned.primary() == false && assigned.initializing() && assigned.relocatingNodeId() == null) { + outgoing++; + } + } + } + } + } + assert incoming == value.incoming : incoming + " != " + value.incoming; + assert outgoing == value.outgoing : outgoing + " != " + value.outgoing + " node: " + routingNode; + } + + assert unassignedPrimaryCount == routingNodes.unassignedShards.getNumPrimaries() : "Unassigned primaries is [" + unassignedPrimaryCount + "] but RoutingNodes returned unassigned primaries [" + routingNodes.unassigned().getNumPrimaries() + "]"; assert unassignedIgnoredPrimaryCount == routingNodes.unassignedShards.getNumIgnoredPrimaries() : @@ -862,4 +969,41 @@ public class RoutingNodes implements Iterable { throw new IllegalStateException("can't modify RoutingNodes - readonly"); } } + + private static final class Recoveries { + private static final Recoveries EMPTY = new Recoveries(); + private int incoming = 0; + private int outgoing = 0; + + int getTotal() { + return incoming + outgoing; + } + + void addOutgoing(int howMany) { + assert outgoing + howMany >= 0 : outgoing + howMany+ " must be >= 0"; + outgoing += howMany; + } + + void addIncoming(int howMany) { + assert incoming + howMany >= 0 : incoming + howMany+ " must be >= 0"; + incoming += howMany; + } + + int getOutgoing() { + return outgoing; + } + + int getIncoming() { + return incoming; + } + + public static Recoveries getOrAdd(Map map, String key) { + Recoveries recoveries = map.get(key); + if (recoveries == null) { + recoveries = new Recoveries(); + map.put(key, recoveries); + } + return recoveries; + } + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java index 5cd4366bea4..c683f0200dc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java @@ -19,7 +19,11 @@ package 
org.elasticsearch.cluster.routing; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Priority; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index fbabacd79fd..0fb7513f73f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing; import com.carrotsearch.hppc.IntSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; import org.elasticsearch.cluster.DiffableUtils; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index 8dd71e3fba5..5ffaee0f2f9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -19,6 +19,8 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -267,7 +269,7 @@ public final class ShardRouting implements Streamable, ToXContent { return shardIdentifier; } - public boolean allocatedPostIndexCreate() { + public boolean allocatedPostIndexCreate(IndexMetaData indexMetaData) { if (active()) { return true; } @@ -279,6 +281,11 @@ public final class ShardRouting implements Streamable, ToXContent { return false; } + if (indexMetaData.activeAllocationIds(id()).isEmpty() && indexMetaData.getCreationVersion().onOrAfter(Version.V_3_0_0)) { + // when no shards with this id have ever been active for this index + return false; + } + return true; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 2268bf1d995..25937595556 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -39,12 +40,16 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; -import 
org.elasticsearch.common.Randomness; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; @@ -177,7 +182,10 @@ public class AllocationService extends AbstractComponent { routingNodes.unassigned().shuffle(); FailedRerouteAllocation allocation = new FailedRerouteAllocation(allocationDeciders, routingNodes, clusterState.nodes(), failedShards, clusterInfoService.getClusterInfo()); boolean changed = false; - for (FailedRerouteAllocation.FailedShard failedShard : failedShards) { + // as failing primaries also fail associated replicas, we fail replicas first here so that their nodes are added to ignore list + List orderedFailedShards = new ArrayList<>(failedShards); + orderedFailedShards.sort(Comparator.comparing(failedShard -> failedShard.shard.primary())); + for (FailedRerouteAllocation.FailedShard failedShard : orderedFailedShards) { changed |= applyFailedShard(allocation, failedShard.shard, true, new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, failedShard.message, failedShard.failure, System.nanoTime(), System.currentTimeMillis())); } @@ -361,35 +369,17 @@ public class AllocationService extends AbstractComponent { private boolean electPrimariesAndUnassignedDanglingReplicas(RoutingAllocation allocation) { boolean changed = false; - RoutingNodes routingNodes = allocation.routingNodes(); + final RoutingNodes routingNodes = allocation.routingNodes(); if (routingNodes.unassigned().getNumPrimaries() == 0) { // move out if we don't have unassigned primaries return changed; } - - // go over and remove dangling replicas that are initializing for primary shards - List shardsToFail = new ArrayList<>(); - for (ShardRouting shardEntry : routingNodes.unassigned()) { - if (shardEntry.primary()) { - for (ShardRouting routing : routingNodes.assignedShards(shardEntry)) { - if (!routing.primary() && routing.initializing()) { - shardsToFail.add(routing); - } - } - - } - } - for (ShardRouting shardToFail : shardsToFail) { - changed |= applyFailedShard(allocation, shardToFail, false, - new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", - null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); - } - // now, go over and elect a new primary if possible, not, from this code block on, if one is elected, // routingNodes.hasUnassignedPrimaries() will potentially be false - for (ShardRouting shardEntry : routingNodes.unassigned()) { if (shardEntry.primary()) { + // remove dangling replicas that are initializing for primary shards + changed |= failReplicasForUnassignedPrimary(allocation, shardEntry); ShardRouting candidate = allocation.routingNodes().activeReplica(shardEntry); if (candidate != null) { IndexMetaData index = allocation.metaData().index(candidate.index()); @@ -454,6 +444,22 @@ public class AllocationService extends AbstractComponent { return changed; } + private boolean failReplicasForUnassignedPrimary(RoutingAllocation allocation, ShardRouting primary) { + List replicas = new ArrayList<>(); + for (ShardRouting routing : allocation.routingNodes().assignedShards(primary)) { + if (!routing.primary() && routing.initializing()) { + replicas.add(routing); + } + } + boolean changed 
= false; + for (ShardRouting routing : replicas) { + changed |= applyFailedShard(allocation, routing, false, + new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", + null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); + } + return changed; + } + private boolean applyStartedShards(RoutingNodes routingNodes, Iterable startedShardEntries) { boolean dirty = false; // apply shards might be called several times with the same shard, ignore it @@ -520,7 +526,6 @@ public class AllocationService extends AbstractComponent { logger.debug("{} ignoring shard failure, unknown index in {} ({})", failedShard.shardId(), failedShard, unassignedInfo.shortSummary()); return false; } - RoutingNodes routingNodes = allocation.routingNodes(); RoutingNodes.RoutingNodeIterator matchedNode = routingNodes.routingNodeIter(failedShard.currentNodeId()); @@ -543,7 +548,10 @@ public class AllocationService extends AbstractComponent { logger.debug("{} ignoring shard failure, unknown allocation id in {} ({})", failedShard.shardId(), failedShard, unassignedInfo.shortSummary()); return false; } - + if (failedShard.primary()) { + // fail replicas first otherwise we move RoutingNodes into an inconsistent state + failReplicasForUnassignedPrimary(allocation, failedShard); + } // replace incoming instance to make sure we work on the latest one. Copy it to maintain information during modifications. failedShard = new ShardRouting(matchedNode.current()); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index b9ce532a611..80f634e13cf 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -20,9 +20,9 @@ package org.elasticsearch.cluster.routing.allocation.allocator; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.IntroSorter; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -37,9 +37,10 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PriorityComparator; -import org.elasticsearch.node.settings.NodeSettingsService; import java.util.ArrayList; import java.util.Collection; @@ -72,42 +73,32 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; */ public class BalancedShardsAllocator extends AbstractComponent implements ShardsAllocator { - public static final String SETTING_THRESHOLD = "cluster.routing.allocation.balance.threshold"; - public static final String SETTING_INDEX_BALANCE_FACTOR = "cluster.routing.allocation.balance.index"; - public static final String SETTING_SHARD_BALANCE_FACTOR = "cluster.routing.allocation.balance.shard"; - - private static final float 
DEFAULT_INDEX_BALANCE_FACTOR = 0.55f; - private static final float DEFAULT_SHARD_BALANCE_FACTOR = 0.45f; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - final float indexBalance = settings.getAsFloat(SETTING_INDEX_BALANCE_FACTOR, weightFunction.indexBalance); - final float shardBalance = settings.getAsFloat(SETTING_SHARD_BALANCE_FACTOR, weightFunction.shardBalance); - float threshold = settings.getAsFloat(SETTING_THRESHOLD, BalancedShardsAllocator.this.threshold); - if (threshold <= 0.0f) { - throw new IllegalArgumentException("threshold must be greater than 0.0f but was: " + threshold); - } - BalancedShardsAllocator.this.threshold = threshold; - BalancedShardsAllocator.this.weightFunction = new WeightFunction(indexBalance, shardBalance); - } - } - - private volatile WeightFunction weightFunction = new WeightFunction(DEFAULT_INDEX_BALANCE_FACTOR, DEFAULT_SHARD_BALANCE_FACTOR); - - private volatile float threshold = 1.0f; + public static final Setting INDEX_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, true, Setting.Scope.CLUSTER); + public static final Setting SHARD_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, true, Setting.Scope.CLUSTER); + public static final Setting THRESHOLD_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, true, Setting.Scope.CLUSTER); + private volatile WeightFunction weightFunction; + private volatile float threshold; public BalancedShardsAllocator(Settings settings) { - this(settings, new NodeSettingsService(settings)); + this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); } @Inject - public BalancedShardsAllocator(Settings settings, NodeSettingsService nodeSettingsService) { + public BalancedShardsAllocator(Settings settings, ClusterSettings clusterSettings) { super(settings); - ApplySettings applySettings = new ApplySettings(); - applySettings.onRefreshSettings(settings); - nodeSettingsService.addListener(applySettings); + setWeightFunction(INDEX_BALANCE_FACTOR_SETTING.get(settings), SHARD_BALANCE_FACTOR_SETTING.get(settings)); + setThreshold(THRESHOLD_SETTING.get(settings)); + clusterSettings.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, SHARD_BALANCE_FACTOR_SETTING, this::setWeightFunction); + clusterSettings.addSettingsUpdateConsumer(THRESHOLD_SETTING, this::setThreshold); + } + + private void setWeightFunction(float indexBalance, float shardBalanceFactor) { + weightFunction = new WeightFunction(indexBalance, shardBalanceFactor); + } + + private void setThreshold(float threshold) { + this.threshold = threshold; } @Override @@ -183,7 +174,8 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards private final float indexBalance; private final float shardBalance; - private final float[] theta; + private final float theta0; + private final float theta1; public WeightFunction(float indexBalance, float shardBalance) { @@ -191,37 +183,30 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (sum <= 0.0f) { throw new IllegalArgumentException("Balance factors must sum to a value > 0 but was: " + sum); } - theta = new float[]{shardBalance / sum, indexBalance / sum}; + theta0 = shardBalance / sum; + theta1 = indexBalance / sum; this.indexBalance = indexBalance; this.shardBalance = shardBalance; } - public float weight(Operation operation, 
Balancer balancer, ModelNode node, String index) { - final float weightShard = (node.numShards() - balancer.avgShardsPerNode()); - final float weightIndex = (node.numShards(index) - balancer.avgShardsPerNode(index)); - assert theta != null; - return theta[0] * weightShard + theta[1] * weightIndex; + public float weight(Balancer balancer, ModelNode node, String index) { + return weight(balancer, node, index, 0); } - } + public float weightShardAdded(Balancer balancer, ModelNode node, String index) { + return weight(balancer, node, index, 1); + } + + public float weightShardRemoved(Balancer balancer, ModelNode node, String index) { + return weight(balancer, node, index, -1); + } + + private float weight(Balancer balancer, ModelNode node, String index, int numAdditionalShards) { + final float weightShard = (node.numShards() + numAdditionalShards - balancer.avgShardsPerNode()); + final float weightIndex = (node.numShards(index) + numAdditionalShards - balancer.avgShardsPerNode(index)); + return theta0 * weightShard + theta1 * weightIndex; + } - /** - * An enum that donates the actual operation the {@link WeightFunction} is - * applied to. - */ - public static enum Operation { - /** - * Provided during balance operations. - */ - BALANCE, - /** - * Provided during initial allocation operation for unassigned shards. - */ - ALLOCATE, - /** - * Provided during move operation. - */ - MOVE } /** @@ -237,6 +222,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards private final float threshold; private final MetaData metaData; + private final float avgShardsPerNode; private final Predicate assignedFilter = shard -> shard.assignedToNode(); @@ -250,6 +236,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards nodes.put(node.nodeId(), new ModelNode(node.nodeId())); } metaData = routingNodes.metaData(); + avgShardsPerNode = ((float) metaData.totalNumberOfShards()) / nodes.size(); } /** @@ -270,21 +257,13 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * Returns the global average of shards per node */ public float avgShardsPerNode() { - return ((float) metaData.totalNumberOfShards()) / nodes.size(); + return avgShardsPerNode; } - /** - * Returns the global average of primaries per node - */ - public float avgPrimariesPerNode() { - return ((float) metaData.numberOfShards()) / nodes.size(); - } - - /** * Returns a new {@link NodeSorter} that sorts the nodes based on their * current weight with respect to the index passed to the sorter. The - * returned sorter is not sorted. Use {@link NodeSorter#reset(org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.Operation, String)} + * returned sorter is not sorted. Use {@link NodeSorter#reset(String)} * to sort based on an index. 
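On the `WeightFunction` rewrite above: instead of temporarily adding a shard to a model node, computing the weight, and removing it again, the new `weightShardAdded`/`weightShardRemoved` variants simply offset both shard counts by plus or minus one. A standalone approximation with hard-coded counts (the real balancer reads these from its model nodes; the 0.55/0.45 defaults come from the settings in the patch):

```java
public class WeightDemo {
    // Normalized balance factors, as in the patch: theta0 for total shards, theta1 for the index.
    static final float INDEX_BALANCE = 0.55f;
    static final float SHARD_BALANCE = 0.45f;
    static final float SUM = INDEX_BALANCE + SHARD_BALANCE;
    static final float THETA0 = SHARD_BALANCE / SUM;
    static final float THETA1 = INDEX_BALANCE / SUM;

    static float weight(int nodeShards, float avgShardsPerNode,
                        int nodeIndexShards, float avgIndexShardsPerNode,
                        int numAdditionalShards) {
        float weightShard = nodeShards + numAdditionalShards - avgShardsPerNode;
        float weightIndex = nodeIndexShards + numAdditionalShards - avgIndexShardsPerNode;
        return THETA0 * weightShard + THETA1 * weightIndex;
    }

    public static void main(String[] args) {
        // Simulate "what if we moved one shard of this index from maxNode to minNode":
        float delta = weight(2, 4.0f, 0, 2.0f, +1)   // minNode with one shard added
                    - weight(6, 4.0f, 4, 2.0f, -1);  // maxNode with one shard removed
        System.out.println("delta = " + delta);      // -2.0: the move improves balance
    }
}
```

A negative delta means moving one shard from the heavy node to the light node brings both closer to the averages, which mirrors the `delta < minCost` comparison in `tryRelocateShard`.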
*/ private NodeSorter newNodeSorter() { @@ -358,12 +337,33 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (onlyAssign == false && changed == false && allocation.deciders().canRebalance(allocation).type() == Type.YES) { NodeSorter sorter = newNodeSorter(); if (nodes.size() > 1) { /* skip if we only have one node */ - for (String index : buildWeightOrderedIndidces(Operation.BALANCE, sorter)) { - sorter.reset(Operation.BALANCE, index); - final float[] weights = sorter.weights; - final ModelNode[] modelNodes = sorter.modelNodes; + AllocationDeciders deciders = allocation.deciders(); + final ModelNode[] modelNodes = sorter.modelNodes; + final float[] weights = sorter.weights; + for (String index : buildWeightOrderedIndices(sorter)) { + IndexMetaData indexMetaData = metaData.index(index); + + // find nodes that have a shard of this index or where shards of this index are allowed to stay + // move these nodes to the front of modelNodes so that we can only balance based on these nodes + int relevantNodes = 0; + for (int i = 0; i < modelNodes.length; i++) { + ModelNode modelNode = modelNodes[i]; + if (modelNode.getIndex(index) != null + || deciders.canAllocate(indexMetaData, modelNode.getRoutingNode(routingNodes), allocation).type() != Type.NO) { + // swap nodes at position i and relevantNodes + modelNodes[i] = modelNodes[relevantNodes]; + modelNodes[relevantNodes] = modelNode; + relevantNodes++; + } + } + + if (relevantNodes < 2) { + continue; + } + + sorter.reset(index, 0, relevantNodes); int lowIdx = 0; - int highIdx = weights.length - 1; + int highIdx = relevantNodes - 1; while (true) { final ModelNode minNode = modelNodes[lowIdx]; final ModelNode maxNode = modelNodes[highIdx]; @@ -398,17 +398,17 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } /* pass the delta to the replication function to prevent relocations that only swap the weights of the two nodes. * a relocation must bring us closer to the balance if we only achieve the same delta the relocation is useless */ - if (tryRelocateShard(Operation.BALANCE, minNode, maxNode, index, delta)) { + if (tryRelocateShard(minNode, maxNode, index, delta)) { /* * TODO we could be a bit smarter here, we don't need to fully sort necessarily * we could just find the place to insert linearly but the win might be minor * compared to the added complexity */ - weights[lowIdx] = sorter.weight(Operation.BALANCE, modelNodes[lowIdx]); - weights[highIdx] = sorter.weight(Operation.BALANCE, modelNodes[highIdx]); - sorter.sort(0, weights.length); + weights[lowIdx] = sorter.weight(modelNodes[lowIdx]); + weights[highIdx] = sorter.weight(modelNodes[highIdx]); + sorter.sort(0, relevantNodes); lowIdx = 0; - highIdx = weights.length - 1; + highIdx = relevantNodes - 1; changed = true; continue; } @@ -449,11 +449,11 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * average. To re-balance we need to move shards back eventually likely * to the nodes we relocated them from. 
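The rebalancing loop above now restricts each index's pass to the nodes that either already hold a shard of that index or are allowed to receive one (`canAllocate` not returning NO), by swapping those nodes to the front of the model array and sorting only that prefix via `sorter.reset(index, 0, relevantNodes)`. The in-place partition is a standard swap-to-front filter; a sketch with an invented predicate:

```java
import java.util.Arrays;
import java.util.function.Predicate;

public class RelevantNodesDemo {
    /** Moves elements matching the predicate to the front and returns how many matched. */
    static int partitionToFront(String[] nodes, Predicate<String> canHostIndex) {
        int relevantNodes = 0;
        for (int i = 0; i < nodes.length; i++) {
            String node = nodes[i];
            if (canHostIndex.test(node)) {
                // swap nodes[i] and nodes[relevantNodes], exactly as the patch does
                nodes[i] = nodes[relevantNodes];
                nodes[relevantNodes] = node;
                relevantNodes++;
            }
        }
        return relevantNodes;
    }

    public static void main(String[] args) {
        String[] nodes = {"node1", "node2", "node3", "node4"};
        // Pretend only nodes with an even suffix may hold shards of this index.
        int relevant = partitionToFront(nodes, n -> n.endsWith("2") || n.endsWith("4"));
        // Only the first `relevant` entries take part in sorting and balancing.
        Arrays.sort(nodes, 0, relevant);
        System.out.println(relevant + " -> " + Arrays.toString(nodes)); // 2 -> [node2, node4, node3, node1]
    }
}
```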
*/ - private String[] buildWeightOrderedIndidces(Operation operation, NodeSorter sorter) { + private String[] buildWeightOrderedIndices(NodeSorter sorter) { final String[] indices = this.indices.toArray(new String[this.indices.size()]); final float[] deltas = new float[indices.length]; for (int i = 0; i < deltas.length; i++) { - sorter.reset(operation, indices[i]); + sorter.reset(indices[i]); deltas[i] = sorter.delta(); } new IntroSorter() { @@ -513,7 +513,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards final ModelNode sourceNode = nodes.get(node.nodeId()); assert sourceNode != null; final NodeSorter sorter = newNodeSorter(); - sorter.reset(Operation.MOVE, shard.getIndex()); + sorter.reset(shard.getIndex()); final ModelNode[] nodes = sorter.modelNodes; assert sourceNode.containsShard(shard); /* @@ -527,7 +527,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (currentNode.getNodeId().equals(node.nodeId())) { continue; } - RoutingNode target = routingNodes.node(currentNode.getNodeId()); + RoutingNode target = currentNode.getRoutingNode(routingNodes); Decision allocationDecision = allocation.deciders().canAllocate(shard, target, allocation); Decision rebalanceDecision = allocation.deciders().canRebalance(shard, allocation); Decision decision = new Decision.Multi().add(allocationDecision).add(rebalanceDecision); @@ -653,26 +653,15 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (throttledNodes.contains(node)) { continue; } - /* - * The shard we add is removed below to simulate the - * addition for weight calculation we use Decision.ALWAYS to - * not violate the not null condition. - */ if (!node.containsShard(shard)) { - node.addShard(shard, Decision.ALWAYS); - float currentWeight = weight.weight(Operation.ALLOCATE, this, node, shard.index()); - /* - * Remove the shard from the node again this is only a - * simulation - */ - Decision removed = node.removeShard(shard); - assert removed != null; + // simulate weight if we would add shard to node + float currentWeight = weight.weightShardAdded(this, node, shard.index()); /* * Unless the operation is not providing any gains we * don't check deciders */ if (currentWeight <= minWeight) { - Decision currentDecision = deciders.canAllocate(shard, routingNodes.node(node.getNodeId()), allocation); + Decision currentDecision = deciders.canAllocate(shard, node.getRoutingNode(routingNodes), allocation); NOUPDATE: if (currentDecision.type() == Type.YES || currentDecision.type() == Type.THROTTLE) { if (currentWeight == minWeight) { @@ -718,11 +707,11 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (logger.isTraceEnabled()) { logger.trace("Assigned shard [{}] to [{}]", shard, minNode.getNodeId()); } - routingNodes.initialize(shard, routingNodes.node(minNode.getNodeId()).nodeId(), allocation.clusterInfo().getShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + routingNodes.initialize(shard, minNode.getNodeId(), allocation.clusterInfo().getShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); changed = true; continue; // don't add to ignoreUnassigned } else { - final RoutingNode node = routingNodes.node(minNode.getNodeId()); + final RoutingNode node = minNode.getRoutingNode(routingNodes); if (deciders.canAllocate(node, allocation).type() != Type.YES) { if (logger.isTraceEnabled()) { logger.trace("Can not allocate on node [{}] remove from round decision [{}]", node, decision.type()); @@ 
-758,7 +747,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * balance model. Iff this method returns a true the relocation has already been executed on the * simulation model as well as on the cluster. */ - private boolean tryRelocateShard(Operation operation, ModelNode minNode, ModelNode maxNode, String idx, float minCost) { + private boolean tryRelocateShard(ModelNode minNode, ModelNode maxNode, String idx, float minCost) { final ModelIndex index = maxNode.getIndex(idx); Decision decision = null; if (index != null) { @@ -766,22 +755,18 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards logger.trace("Try relocating shard for index index [{}] from node [{}] to node [{}]", idx, maxNode.getNodeId(), minNode.getNodeId()); } - final RoutingNode node = routingNodes.node(minNode.getNodeId()); ShardRouting candidate = null; final AllocationDeciders deciders = allocation.deciders(); - /* make a copy since we modify this list in the loop */ - final ArrayList shards = new ArrayList<>(index.getAllShards()); - for (ShardRouting shard : shards) { + for (ShardRouting shard : index.getAllShards()) { if (shard.started()) { // skip initializing, unassigned and relocating shards we can't relocate them anyway - Decision allocationDecision = deciders.canAllocate(shard, node, allocation); + Decision allocationDecision = deciders.canAllocate(shard, minNode.getRoutingNode(routingNodes), allocation); Decision rebalanceDecision = deciders.canRebalance(shard, allocation); if (((allocationDecision.type() == Type.YES) || (allocationDecision.type() == Type.THROTTLE)) && ((rebalanceDecision.type() == Type.YES) || (rebalanceDecision.type() == Type.THROTTLE))) { - Decision srcDecision; - if ((srcDecision = maxNode.removeShard(shard)) != null) { - minNode.addShard(shard, srcDecision); - final float delta = weight.weight(operation, this, minNode, idx) - weight.weight(operation, this, maxNode, idx); + if (maxNode.containsShard(shard)) { + // simulate moving shard from maxNode to minNode + final float delta = weight.weightShardAdded(this, minNode, idx) - weight.weightShardRemoved(this, maxNode, idx); if (delta < minCost || (candidate != null && delta == minCost && candidate.id() > shard.id())) { /* this last line is a tie-breaker to make the shard allocation alg deterministic @@ -790,8 +775,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards candidate = shard; decision = new Decision.Multi().add(allocationDecision).add(rebalanceDecision); } - minNode.removeShard(shard); - maxNode.addShard(shard, srcDecision); } } } @@ -809,11 +792,10 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } /* now allocate on the cluster - if we are started we need to relocate the shard */ if (candidate.started()) { - RoutingNode lowRoutingNode = routingNodes.node(minNode.getNodeId()); - routingNodes.relocate(candidate, lowRoutingNode.nodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + routingNodes.relocate(candidate, minNode.getNodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); } else { - routingNodes.initialize(candidate, routingNodes.node(minNode.getNodeId()).nodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + routingNodes.initialize(candidate, minNode.getNodeId(), allocation.clusterInfo().getShardSize(candidate, 
ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); } return true; @@ -832,8 +814,9 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards static class ModelNode implements Iterable { private final String id; private final Map indices = new HashMap<>(); - /* cached stats - invalidated on add/remove and lazily calculated */ - private int numShards = -1; + private int numShards = 0; + // lazily calculated + private RoutingNode routingNode; public ModelNode(String id) { this.id = id; @@ -847,14 +830,14 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return id; } - public int numShards() { - if (numShards == -1) { - int sum = 0; - for (ModelIndex index : indices.values()) { - sum += index.numShards(); - } - numShards = sum; + public RoutingNode getRoutingNode(RoutingNodes routingNodes) { + if (routingNode == null) { + routingNode = routingNodes.node(id); } + return routingNode; + } + + public int numShards() { return numShards; } @@ -863,14 +846,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return index == null ? 0 : index.numShards(); } - public Collection shards() { - Collection result = new ArrayList<>(); - for (ModelIndex index : indices.values()) { - result.addAll(index.getAllShards()); - } - return result; - } - public int highestPrimary(String index) { ModelIndex idx = indices.get(index); if (idx != null) { @@ -880,17 +855,16 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public void addShard(ShardRouting shard, Decision decision) { - numShards = -1; ModelIndex index = indices.get(shard.index()); if (index == null) { index = new ModelIndex(shard.index()); indices.put(index.getIndexId(), index); } index.addShard(shard, decision); + numShards++; } public Decision removeShard(ShardRouting shard) { - numShards = -1; ModelIndex index = indices.get(shard.index()); Decision removed = null; if (index != null) { @@ -899,6 +873,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards indices.remove(shard.index()); } } + numShards--; return removed; } @@ -924,7 +899,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards static final class ModelIndex { private final String id; private final Map shards = new HashMap<>(); - private int numPrimaries = -1; private int highestPrimary = -1; public ModelIndex(String id) { @@ -948,10 +922,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return id; } - public Decision getDecicion(ShardRouting shard) { - return shards.get(shard); - } - public int numShards() { return shards.size(); } @@ -960,26 +930,13 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return shards.keySet(); } - public int numPrimaries() { - if (numPrimaries == -1) { - int num = 0; - for (ShardRouting shard : shards.keySet()) { - if (shard.primary()) { - num++; - } - } - return numPrimaries = num; - } - return numPrimaries; - } - public Decision removeShard(ShardRouting shard) { - highestPrimary = numPrimaries = -1; + highestPrimary = -1; return shards.remove(shard); } public void addShard(ShardRouting shard, Decision decision) { - highestPrimary = numPrimaries = -1; + highestPrimary = -1; assert decision != null; assert !shards.containsKey(shard) : "Shard already allocated on current node: " + shards.get(shard) + " " + shard; shards.put(shard, decision); @@ -1011,16 +968,20 @@ public class BalancedShardsAllocator extends 
AbstractComponent implements Shards * Resets the sorter, recalculates the weights per node and sorts the * nodes by weight, with minimal weight first. */ - public void reset(Operation operation, String index) { + public void reset(String index, int from, int to) { this.index = index; - for (int i = 0; i < weights.length; i++) { - weights[i] = weight(operation, modelNodes[i]); + for (int i = from; i < to; i++) { + weights[i] = weight(modelNodes[i]); } - sort(0, modelNodes.length); + sort(from, to); } - public float weight(Operation operation, ModelNode node) { - return function.weight(operation, balancer, node, index); + public void reset(String index) { + reset(index, 0, modelNodes.length); + } + + public float weight(ModelNode node) { + return function.weight(balancer, node, index); } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java index 1e835dc4039..13ba033d0ef 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing.allocation.command; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Nullable; @@ -44,12 +43,12 @@ public interface AllocationCommand { /** * Reads an {@link AllocationCommand} of type T from a {@link StreamInput} - * @param in {@link StreamInput} to read the {@link AllocationCommand} from + * @param in {@link StreamInput} to read the {@link AllocationCommand} from * @return {@link AllocationCommand} read from the {@link StreamInput} * @throws IOException if something happens during reading */ T readFrom(StreamInput in) throws IOException; - + /** * Writes an {@link AllocationCommand} to a {@link StreamOutput} * @param command {@link AllocationCommand} to write @@ -57,7 +56,7 @@ public interface AllocationCommand { * @throws IOException if something happens during writing the command */ void writeTo(T command, StreamOutput out) throws IOException; - + /** * Reads an {@link AllocationCommand} of type T from a {@link XContentParser} * @param parser {@link XContentParser} to use @@ -65,7 +64,7 @@ public interface AllocationCommand { * @throws IOException if something happens during reading */ T fromXContent(XContentParser parser) throws IOException; - + /** * Writes an {@link AllocationCommand} using an {@link XContentBuilder} * @param command {@link AllocationCommand} to write diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java index f54fce4ca6e..ed535df2f48 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java @@ -21,8 +21,8 @@ package org.elasticsearch.cluster.routing.allocation.command; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.RoutingNode; +import 
org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java index a6204485d7d..3bd4069ac73 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; @@ -73,6 +74,14 @@ public abstract class AllocationDecider extends AbstractComponent { return Decision.ALWAYS; } + /** + * Returns a {@link Decision} whether the given shard routing can be allocated at all at this state of the + * {@link RoutingAllocation}. The default is {@link Decision#ALWAYS}. + */ + public Decision canAllocate(IndexMetaData indexMetaData, RoutingNode node, RoutingAllocation allocation) { + return Decision.ALWAYS; + } + /** * Returns a {@link Decision} whether the given node can allow any allocation at all at this state of the * {@link RoutingAllocation}. The default is {@link Decision#ALWAYS}. diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java index f57c48e8a75..059748c3f62 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; @@ -120,6 +121,25 @@ public class AllocationDeciders extends AllocationDecider { return ret; } + @Override + public Decision canAllocate(IndexMetaData indexMetaData, RoutingNode node, RoutingAllocation allocation) { + Decision.Multi ret = new Decision.Multi(); + for (AllocationDecider allocationDecider : allocations) { + Decision decision = allocationDecider.canAllocate(indexMetaData, node, allocation); + // short track if a NO is returned. 
+ if (decision == Decision.NO) { + if (!allocation.debugDecision()) { + return decision; + } else { + ret.add(decision); + } + } else if (decision != Decision.ALWAYS) { + ret.add(decision); + } + } + return ret; + } + @Override public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { Decision.Multi ret = new Decision.Multi(); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 6f7bbac8aea..19047bcb0d1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -26,8 +26,9 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; import java.util.HashMap; import java.util.Map; @@ -76,37 +77,12 @@ public class AwarenessAllocationDecider extends AllocationDecider { public static final String NAME = "awareness"; - public static final String CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES = "cluster.routing.allocation.awareness.attributes"; - public static final String CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP = "cluster.routing.allocation.awareness.force."; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - String[] awarenessAttributes = settings.getAsArray(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, null); - if (awarenessAttributes == null && "".equals(settings.get(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, null))) { - awarenessAttributes = Strings.EMPTY_ARRAY; // the empty string resets this - } - if (awarenessAttributes != null) { - logger.info("updating [cluster.routing.allocation.awareness.attributes] from [{}] to [{}]", AwarenessAllocationDecider.this.awarenessAttributes, awarenessAttributes); - AwarenessAllocationDecider.this.awarenessAttributes = awarenessAttributes; - } - Map forcedAwarenessAttributes = new HashMap<>(AwarenessAllocationDecider.this.forcedAwarenessAttributes); - Map forceGroups = settings.getGroups(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP); - if (!forceGroups.isEmpty()) { - for (Map.Entry entry : forceGroups.entrySet()) { - String[] aValues = entry.getValue().getAsArray("values"); - if (aValues.length > 0) { - forcedAwarenessAttributes.put(entry.getKey(), aValues); - } - } - } - AwarenessAllocationDecider.this.forcedAwarenessAttributes = forcedAwarenessAttributes; - } - } + public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.awareness.force.", true, Setting.Scope.CLUSTER); private String[] awarenessAttributes; - private Map forcedAwarenessAttributes; + private volatile Map forcedAwarenessAttributes; /** * Creates a new 
{@link AwarenessAllocationDecider} instance @@ -121,24 +97,28 @@ public class AwarenessAllocationDecider extends AllocationDecider { * @param settings {@link Settings} to use */ public AwarenessAllocationDecider(Settings settings) { - this(settings, new NodeSettingsService(settings)); + this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); } @Inject - public AwarenessAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); - this.awarenessAttributes = settings.getAsArray(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES); + this.awarenessAttributes = CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.get(settings); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, this::setAwarenessAttributes); + setForcedAwarenessAttributes(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.get(settings)); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, this::setForcedAwarenessAttributes); + } - forcedAwarenessAttributes = new HashMap<>(); - Map forceGroups = settings.getGroups(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP); + private void setForcedAwarenessAttributes(Settings forceSettings) { + Map forcedAwarenessAttributes = new HashMap<>(); + Map forceGroups = forceSettings.getAsGroups(); for (Map.Entry entry : forceGroups.entrySet()) { String[] aValues = entry.getValue().getAsArray("values"); if (aValues.length > 0) { forcedAwarenessAttributes.put(entry.getKey(), aValues); } } - - nodeSettingsService.addListener(new ApplySettings()); + this.forcedAwarenessAttributes = forcedAwarenessAttributes; } /** @@ -150,6 +130,10 @@ public class AwarenessAllocationDecider extends AllocationDecider { return this.awarenessAttributes; } + private void setAwarenessAttributes(String[] awarenessAttributes) { + this.awarenessAttributes = awarenessAttributes; + } + @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return underCapacity(shardRouting, node, allocation, true); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index 7638c7aeee8..b1be2a6fce4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -19,13 +19,12 @@ package org.elasticsearch.cluster.routing.allocation.decider; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; import java.util.Locale; @@ -38,10 +37,10 @@ import java.util.Locale; *

 * <ul>
 * <li><tt>indices_primaries_active</tt> - Re-balancing is allowed only once all
 * primary shards on all indices are active.</li>
- * 
+ *
 * <li><tt>indices_all_active</tt> - Re-balancing is allowed only once all
 * shards on all indices are active.</li>
- * 
+ *
 * <li><tt>always</tt> - Re-balancing is allowed once a shard replication group
 * is active</li>
 * </ul>
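The hunk below is the first full instance of the pattern this patch applies to every decider in the package: the String settings key plus NodeSettingsService.Listener inner class is replaced by a typed, dynamic Setting and a method-reference update consumer, with the backing field made volatile so allocation threads observe updates. A condensed sketch of the resulting shape, as members of ClusterRebalanceAllocationDecider (generic type parameters, which this rendering of the diff strips, are restored; the try/catch around the initial read is elided; imports as in the hunk):

    // Typed, dynamically updatable cluster setting; the parser validates and
    // converts the raw string before an update is ever accepted.
    public static final Setting<ClusterRebalanceType> CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING =
            new Setting<>("cluster.routing.allocation.allow_rebalance",
                    ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), // default value
                    ClusterRebalanceType::parseString,                                       // parse + validate
                    true,                                                                    // dynamic
                    Setting.Scope.CLUSTER);

    private volatile ClusterRebalanceType type;

    @Inject
    public ClusterRebalanceAllocationDecider(Settings settings, ClusterSettings clusterSettings) {
        super(settings);
        type = CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.get(settings); // initial value
        // Replaces the ApplySettings listener: runs only after the new value parses.
        clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, this::setType);
    }

    private void setType(ClusterRebalanceType type) {
        this.type = type;
    }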
@@ -49,19 +48,10 @@ import java.util.Locale; public class ClusterRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "cluster_rebalance"; - - public static final String CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE = "cluster.routing.allocation.allow_rebalance"; - public static final Validator ALLOCATION_ALLOW_REBALANCE_VALIDATOR = (setting, value, clusterState) -> { - try { - ClusterRebalanceType.parseString(value); - return null; - } catch (IllegalArgumentException e) { - return "the value of " + setting + " must be one of: [always, indices_primaries_active, indices_all_active]"; - } - }; + public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), ClusterRebalanceType::parseString, true, Setting.Scope.CLUSTER); /** - * An enum representation for the configured re-balance type. + * An enum representation for the configured re-balance type. */ public static enum ClusterRebalanceType { /** @@ -73,7 +63,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { */ INDICES_PRIMARIES_ACTIVE, /** - * Re-balancing is allowed only once all shards on all indices are active. + * Re-balancing is allowed only once all shards on all indices are active. */ INDICES_ALL_ACTIVE; @@ -85,48 +75,28 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { } else if ("indices_all_active".equalsIgnoreCase(typeString) || "indicesAllActive".equalsIgnoreCase(typeString)) { return ClusterRebalanceType.INDICES_ALL_ACTIVE; } - throw new IllegalArgumentException("Illegal value for " + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE + ": " + typeString); + throw new IllegalArgumentException("Illegal value for " + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING + ": " + typeString); } } - private ClusterRebalanceType type; + private volatile ClusterRebalanceType type; @Inject - public ClusterRebalanceAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public ClusterRebalanceAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); - String allowRebalance = settings.get(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "indices_all_active"); try { - type = ClusterRebalanceType.parseString(allowRebalance); + type = CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.get(settings); } catch (IllegalStateException e) { - logger.warn("[{}] has a wrong value {}, defaulting to 'indices_all_active'", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, allowRebalance); + logger.warn("[{}] has a wrong value {}, defaulting to 'indices_all_active'", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getRaw(settings)); type = ClusterRebalanceType.INDICES_ALL_ACTIVE; } - logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type.toString().toLowerCase(Locale.ROOT)); + logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, type.toString().toLowerCase(Locale.ROOT)); - nodeSettingsService.addListener(new ApplySettings()); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, this::setType); } - class ApplySettings implements NodeSettingsService.Listener { - - @Override - public void onRefreshSettings(Settings settings) { - String newAllowRebalance = settings.get(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, null); - 
if (newAllowRebalance != null) { - ClusterRebalanceType newType = null; - try { - newType = ClusterRebalanceType.parseString(newAllowRebalance); - } catch (IllegalArgumentException e) { - // ignore - } - - if (newType != null && newType != ClusterRebalanceAllocationDecider.this.type) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, - ClusterRebalanceAllocationDecider.this.type.toString().toLowerCase(Locale.ROOT), - newType.toString().toLowerCase(Locale.ROOT)); - ClusterRebalanceAllocationDecider.this.type = newType; - } - } - } + private void setType(ClusterRebalanceType type) { + this.type = type; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index 6bd1b437acf..a9ad35fd526 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -22,8 +22,9 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; /** * Similar to the {@link ClusterRebalanceAllocationDecider} this @@ -41,27 +42,19 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "concurrent_rebalance"; - public static final String CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE = "cluster.routing.allocation.cluster_concurrent_rebalance"; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - int clusterConcurrentRebalance = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, ConcurrentRebalanceAllocationDecider.this.clusterConcurrentRebalance); - if (clusterConcurrentRebalance != ConcurrentRebalanceAllocationDecider.this.clusterConcurrentRebalance) { - logger.info("updating [cluster.routing.allocation.cluster_concurrent_rebalance] from [{}], to [{}]", ConcurrentRebalanceAllocationDecider.this.clusterConcurrentRebalance, clusterConcurrentRebalance); - ConcurrentRebalanceAllocationDecider.this.clusterConcurrentRebalance = clusterConcurrentRebalance; - } - } - } - + public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, -1, true, Setting.Scope.CLUSTER); private volatile int clusterConcurrentRebalance; @Inject - public ConcurrentRebalanceAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public ConcurrentRebalanceAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); - this.clusterConcurrentRebalance = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 2); + this.clusterConcurrentRebalance = CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.get(settings); logger.debug("using [cluster_concurrent_rebalance] with [{}]", 
clusterConcurrentRebalance); - nodeSettingsService.addListener(new ApplySettings()); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, this::setClusterConcurrentRebalance); + } + + private void setClusterConcurrentRebalance(int concurrentRebalance) { + clusterConcurrentRebalance = concurrentRebalance; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index a02c72c5745..23624f050a9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -22,13 +22,13 @@ package org.elasticsearch.cluster.routing.allocation.decider; import com.carrotsearch.hppc.ObjectLookupContainer; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -36,12 +36,13 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RatioValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.node.settings.NodeSettingsService; import java.util.Set; @@ -80,53 +81,11 @@ public class DiskThresholdDecider extends AllocationDecider { private volatile boolean enabled; private volatile TimeValue rerouteInterval; - public static final String CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED = "cluster.routing.allocation.disk.threshold_enabled"; - public static final String CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK = "cluster.routing.allocation.disk.watermark.low"; - public static final String CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK = "cluster.routing.allocation.disk.watermark.high"; - public static final String CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS = "cluster.routing.allocation.disk.include_relocations"; - public static final String CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL = "cluster.routing.allocation.disk.reroute_interval"; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - String newLowWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, null); - String newHighWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, null); - Boolean newRelocationsSetting = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, null); - Boolean newEnableSetting = 
settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, null); - TimeValue newRerouteInterval = settings.getAsTime(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, null); - - if (newEnableSetting != null) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, - DiskThresholdDecider.this.enabled, newEnableSetting); - DiskThresholdDecider.this.enabled = newEnableSetting; - } - if (newRelocationsSetting != null) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, - DiskThresholdDecider.this.includeRelocations, newRelocationsSetting); - DiskThresholdDecider.this.includeRelocations = newRelocationsSetting; - } - if (newLowWatermark != null) { - if (!validWatermarkSetting(newLowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK)) { - throw new ElasticsearchParseException("unable to parse low watermark [{}]", newLowWatermark); - } - logger.info("updating [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, newLowWatermark); - DiskThresholdDecider.this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(newLowWatermark); - DiskThresholdDecider.this.freeBytesThresholdLow = thresholdBytesFromWatermark(newLowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK); - } - if (newHighWatermark != null) { - if (!validWatermarkSetting(newHighWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK)) { - throw new ElasticsearchParseException("unable to parse high watermark [{}]", newHighWatermark); - } - logger.info("updating [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, newHighWatermark); - DiskThresholdDecider.this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(newHighWatermark); - DiskThresholdDecider.this.freeBytesThresholdHigh = thresholdBytesFromWatermark(newHighWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK); - } - if (newRerouteInterval != null) { - logger.info("updating [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, newRerouteInterval); - DiskThresholdDecider.this.rerouteInterval = newRerouteInterval; - } - } - } + public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, Setting.Scope.CLUSTER);; + public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); /** * Listens for a node to go over the high watermark and kicks off an empty @@ -231,38 +190,49 @@ public class DiskThresholdDecider extends AllocationDecider { // It's okay the Client is null here, because the empty cluster info // service will never 
actually call the listener where the client is // needed. Also this constructor is only used for tests - this(settings, new NodeSettingsService(settings), EmptyClusterInfoService.INSTANCE, null); + this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), EmptyClusterInfoService.INSTANCE, null); } @Inject - public DiskThresholdDecider(Settings settings, NodeSettingsService nodeSettingsService, ClusterInfoService infoService, Client client) { + public DiskThresholdDecider(Settings settings, ClusterSettings clusterSettings, ClusterInfoService infoService, Client client) { super(settings); - String lowWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "85%"); - String highWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "90%"); - - if (!validWatermarkSetting(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK)) { - throw new ElasticsearchParseException("unable to parse low watermark [{}]", lowWatermark); - } - if (!validWatermarkSetting(highWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK)) { - throw new ElasticsearchParseException("unable to parse high watermark [{}]", highWatermark); - } - // Watermark is expressed in terms of used data, but we need "free" data watermark - this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(lowWatermark); - this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(highWatermark); - - this.freeBytesThresholdLow = thresholdBytesFromWatermark(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK); - this.freeBytesThresholdHigh = thresholdBytesFromWatermark(highWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK); - this.includeRelocations = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true); - this.rerouteInterval = settings.getAsTime(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, TimeValue.timeValueSeconds(60)); - - this.enabled = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true); - nodeSettingsService.addListener(new ApplySettings()); + final String lowWatermark = CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.get(settings); + final String highWatermark = CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.get(settings); + setHighWatermark(highWatermark); + setLowWatermark(lowWatermark); + this.includeRelocations = CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.get(settings); + this.rerouteInterval = CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.get(settings); + this.enabled = CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, this::setLowWatermark); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, this::setHighWatermark); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, this::setIncludeRelocations); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, this::setRerouteInterval); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); infoService.addListener(new DiskListener(client)); } - // For Testing - ApplySettings newApplySettings() { - return new ApplySettings(); + private void setIncludeRelocations(boolean includeRelocations) { + this.includeRelocations = includeRelocations; + } + + private void 
setRerouteInterval(TimeValue rerouteInterval) { + this.rerouteInterval = rerouteInterval; + } + + private void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + private void setLowWatermark(String lowWatermark) { + // Watermark is expressed in terms of used data, but we need "free" data watermark + this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(lowWatermark); + this.freeBytesThresholdLow = thresholdBytesFromWatermark(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey()); + } + + private void setHighWatermark(String highWatermark) { + // Watermark is expressed in terms of used data, but we need "free" data watermark + this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(highWatermark); + this.freeBytesThresholdHigh = thresholdBytesFromWatermark(highWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey()); } // For Testing @@ -360,7 +330,8 @@ public class DiskThresholdDecider extends AllocationDecider { } // a flag for whether the primary shard has been previously allocated - boolean primaryHasBeenAllocated = shardRouting.primary() && shardRouting.allocatedPostIndexCreate(); + IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + boolean primaryHasBeenAllocated = shardRouting.primary() && shardRouting.allocatedPostIndexCreate(indexMetaData); // checks for exact byte comparisons if (freeBytes < freeBytesThresholdLow.bytes()) { @@ -580,20 +551,21 @@ public class DiskThresholdDecider extends AllocationDecider { /** * Checks if a watermark string is a valid percentage or byte size value, - * returning true if valid, false if invalid. + * @return the watermark value given */ - public boolean validWatermarkSetting(String watermark, String settingName) { + public static String validWatermarkSetting(String watermark, String settingName) { try { RatioValue.parseRatioValue(watermark); - return true; } catch (ElasticsearchParseException e) { try { ByteSizeValue.parseBytesSizeValue(watermark, settingName); - return true; } catch (ElasticsearchParseException ex) { - return false; + ex.addSuppressed(e); + throw ex; } } + return watermark; + } private Decision earlyTerminate(RoutingAllocation allocation, ImmutableOpenMap usages) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 0bbd4935044..a31d36db349 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -19,18 +19,20 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; import java.util.Locale; /** - * This allocation decider allows shard allocations / rebalancing via the cluster wide settings {@link #CLUSTER_ROUTING_ALLOCATION_ENABLE} / - * {@link #CLUSTER_ROUTING_REBALANCE_ENABLE} 
and the per index setting {@link #INDEX_ROUTING_ALLOCATION_ENABLE} / {@link #INDEX_ROUTING_REBALANCE_ENABLE}.
+ * This allocation decider allows shard allocations / rebalancing via the cluster wide settings {@link #CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} /
+ * {@link #CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} and the per index setting {@link #INDEX_ROUTING_ALLOCATION_ENABLE} / {@link #INDEX_ROUTING_REBALANCE_ENABLE}.
 * The per index settings override the cluster wide setting.
 *

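In the EnableAllocationDecider hunk below, only the two cluster-wide keys become Setting instances; the per index keys stay plain strings because they are read straight off the index metadata at decision time. The resolution order in canAllocate is unchanged and worth keeping in mind while reading the hunk: a per index value, when present, wins over the dynamic cluster-wide one. Condensed from the change (shardRouting, allocation, and the volatile enableAllocation field are assumed in scope):

    IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex());
    String enableIndexValue = indexMetaData.getSettings().get(INDEX_ROUTING_ALLOCATION_ENABLE);
    final Allocation enable;
    if (enableIndexValue != null) {
        enable = Allocation.parse(enableIndexValue); // per index override takes precedence
    } else {
        enable = this.enableAllocation;              // volatile field kept fresh by the update consumer
    }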
@@ -54,26 +56,34 @@ import java.util.Locale; * @see Rebalance * @see Allocation */ -public class EnableAllocationDecider extends AllocationDecider implements NodeSettingsService.Listener { +public class EnableAllocationDecider extends AllocationDecider { public static final String NAME = "enable"; - public static final String CLUSTER_ROUTING_ALLOCATION_ENABLE = "cluster.routing.allocation.enable"; - public static final String INDEX_ROUTING_ALLOCATION_ENABLE = "index.routing.allocation.enable"; + public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.CLUSTER); + public static final String INDEX_ROUTING_ALLOCATION_ENABLE= "index.routing.allocation.enable"; - public static final String CLUSTER_ROUTING_REBALANCE_ENABLE = "cluster.routing.rebalance.enable"; + public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.CLUSTER); public static final String INDEX_ROUTING_REBALANCE_ENABLE = "index.routing.rebalance.enable"; private volatile Rebalance enableRebalance; private volatile Allocation enableAllocation; - @Inject - public EnableAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public EnableAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); - this.enableAllocation = Allocation.parse(settings.get(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.ALL.name())); - this.enableRebalance = Rebalance.parse(settings.get(CLUSTER_ROUTING_REBALANCE_ENABLE, Rebalance.ALL.name())); - nodeSettingsService.addListener(this); + this.enableAllocation = CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.get(settings); + this.enableRebalance = CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.get(settings); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, this::setEnableAllocation); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING, this::setEnableRebalance); + } + + public void setEnableRebalance(Rebalance enableRebalance) { + this.enableRebalance = enableRebalance; + } + + public void setEnableAllocation(Allocation enableAllocation) { + this.enableAllocation = enableAllocation; } @Override @@ -82,8 +92,8 @@ public class EnableAllocationDecider extends AllocationDecider implements NodeSe return allocation.decision(Decision.YES, NAME, "allocation disabling is ignored"); } - Settings indexSettings = allocation.routingNodes().metaData().index(shardRouting.index()).getSettings(); - String enableIndexValue = indexSettings.get(INDEX_ROUTING_ALLOCATION_ENABLE); + IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + String enableIndexValue = indexMetaData.getSettings().get(INDEX_ROUTING_ALLOCATION_ENABLE); final Allocation enable; if (enableIndexValue != null) { enable = Allocation.parse(enableIndexValue); @@ -96,7 +106,7 @@ public class EnableAllocationDecider extends AllocationDecider implements NodeSe case NONE: return allocation.decision(Decision.NO, NAME, "no allocations are allowed"); case NEW_PRIMARIES: - if (shardRouting.primary() && shardRouting.allocatedPostIndexCreate() == false) { + if (shardRouting.primary() && shardRouting.allocatedPostIndexCreate(indexMetaData) == false) { return allocation.decision(Decision.YES, NAME, "new primary allocations are allowed"); } else { return allocation.decision(Decision.NO, 
NAME, "non-new primary allocations are forbidden"); @@ -148,25 +158,9 @@ public class EnableAllocationDecider extends AllocationDecider implements NodeSe } } - @Override - public void onRefreshSettings(Settings settings) { - final Allocation enable = Allocation.parse(settings.get(CLUSTER_ROUTING_ALLOCATION_ENABLE, this.enableAllocation.name())); - if (enable != this.enableAllocation) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_ENABLE, this.enableAllocation, enable); - EnableAllocationDecider.this.enableAllocation = enable; - } - - final Rebalance enableRebalance = Rebalance.parse(settings.get(CLUSTER_ROUTING_REBALANCE_ENABLE, this.enableRebalance.name())); - if (enableRebalance != this.enableRebalance) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_ROUTING_REBALANCE_ENABLE, this.enableRebalance, enableRebalance); - EnableAllocationDecider.this.enableRebalance = enableRebalance; - } - - } - /** * Allocation values or rather their string representation to be used used with - * {@link EnableAllocationDecider#CLUSTER_ROUTING_ALLOCATION_ENABLE} / {@link EnableAllocationDecider#INDEX_ROUTING_ALLOCATION_ENABLE} + * {@link EnableAllocationDecider#CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_ALLOCATION_ENABLE} * via cluster / index settings. */ public enum Allocation { @@ -192,7 +186,7 @@ public class EnableAllocationDecider extends AllocationDecider implements NodeSe /** * Rebalance values or rather their string representation to be used used with - * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE} / {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE} + * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE} * via cluster / index settings. 
*/ public enum Rebalance { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index e0e2caaf04a..eb9fe10e965 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -25,10 +25,9 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; - -import java.util.Map; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; @@ -65,36 +64,23 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String INDEX_ROUTING_INCLUDE_GROUP = "index.routing.allocation.include."; public static final String INDEX_ROUTING_EXCLUDE_GROUP = "index.routing.allocation.exclude."; - public static final String CLUSTER_ROUTING_REQUIRE_GROUP = "cluster.routing.allocation.require."; - public static final String CLUSTER_ROUTING_INCLUDE_GROUP = "cluster.routing.allocation.include."; - public static final String CLUSTER_ROUTING_EXCLUDE_GROUP = "cluster.routing.allocation.exclude."; + public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.require.", true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.include.", true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.exclude.", true, Setting.Scope.CLUSTER); private volatile DiscoveryNodeFilters clusterRequireFilters; private volatile DiscoveryNodeFilters clusterIncludeFilters; private volatile DiscoveryNodeFilters clusterExcludeFilters; @Inject - public FilterAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public FilterAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); - Map requireMap = settings.getByPrefix(CLUSTER_ROUTING_REQUIRE_GROUP).getAsMap(); - if (requireMap.isEmpty()) { - clusterRequireFilters = null; - } else { - clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); - } - Map includeMap = settings.getByPrefix(CLUSTER_ROUTING_INCLUDE_GROUP).getAsMap(); - if (includeMap.isEmpty()) { - clusterIncludeFilters = null; - } else { - clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); - } - Map excludeMap = settings.getByPrefix(CLUSTER_ROUTING_EXCLUDE_GROUP).getAsMap(); - if (excludeMap.isEmpty()) { - clusterExcludeFilters = null; - } else { - clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); - } - nodeSettingsService.addListener(new ApplySettings()); + setClusterRequireFilters(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING.get(settings)); + setClusterExcludeFilters(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.get(settings)); + 
setClusterIncludeFilters(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.get(settings)); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, this::setClusterRequireFilters); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, this::setClusterExcludeFilters); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, this::setClusterIncludeFilters); } @Override @@ -102,29 +88,37 @@ public class FilterAllocationDecider extends AllocationDecider { return shouldFilter(shardRouting, node, allocation); } + @Override + public Decision canAllocate(IndexMetaData indexMetaData, RoutingNode node, RoutingAllocation allocation) { + return shouldFilter(indexMetaData, node, allocation); + } + @Override public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return shouldFilter(shardRouting, node, allocation); } private Decision shouldFilter(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - if (clusterRequireFilters != null) { - if (!clusterRequireFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match global required filters [%s]", clusterRequireFilters); - } - } - if (clusterIncludeFilters != null) { - if (!clusterIncludeFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match global include filters [%s]", clusterIncludeFilters); - } - } - if (clusterExcludeFilters != null) { - if (clusterExcludeFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node matches global exclude filters [%s]", clusterExcludeFilters); - } - } + Decision decision = shouldClusterFilter(node, allocation); + if (decision != null) return decision; - IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index()); + decision = shouldIndexFilter(allocation.routingNodes().metaData().index(shardRouting.index()), node, allocation); + if (decision != null) return decision; + + return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); + } + + private Decision shouldFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) { + Decision decision = shouldClusterFilter(node, allocation); + if (decision != null) return decision; + + decision = shouldIndexFilter(indexMd, node, allocation); + if (decision != null) return decision; + + return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); + } + + private Decision shouldIndexFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) { if (indexMd.requireFilters() != null) { if (!indexMd.requireFilters().match(node.node())) { return allocation.decision(Decision.NO, NAME, "node does not match index required filters [%s]", indexMd.requireFilters()); @@ -140,25 +134,35 @@ public class FilterAllocationDecider extends AllocationDecider { return allocation.decision(Decision.NO, NAME, "node matches index exclude filters [%s]", indexMd.excludeFilters()); } } - - return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); + return null; } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - Map requireMap = settings.getByPrefix(CLUSTER_ROUTING_REQUIRE_GROUP).getAsMap(); - if (!requireMap.isEmpty()) { - clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); - } - Map 
includeMap = settings.getByPrefix(CLUSTER_ROUTING_INCLUDE_GROUP).getAsMap(); - if (!includeMap.isEmpty()) { - clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); - } - Map excludeMap = settings.getByPrefix(CLUSTER_ROUTING_EXCLUDE_GROUP).getAsMap(); - if (!excludeMap.isEmpty()) { - clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); + private Decision shouldClusterFilter(RoutingNode node, RoutingAllocation allocation) { + if (clusterRequireFilters != null) { + if (!clusterRequireFilters.match(node.node())) { + return allocation.decision(Decision.NO, NAME, "node does not match global required filters [%s]", clusterRequireFilters); } } + if (clusterIncludeFilters != null) { + if (!clusterIncludeFilters.match(node.node())) { + return allocation.decision(Decision.NO, NAME, "node does not match global include filters [%s]", clusterIncludeFilters); + } + } + if (clusterExcludeFilters != null) { + if (clusterExcludeFilters.match(node.node())) { + return allocation.decision(Decision.NO, NAME, "node matches global exclude filters [%s]", clusterExcludeFilters); + } + } + return null; + } + + private void setClusterRequireFilters(Settings settings) { + clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, settings.getAsMap()); + } + private void setClusterIncludeFilters(Settings settings) { + clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, settings.getAsMap()); + } + private void setClusterExcludeFilters(Settings settings) { + clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, settings.getAsMap()); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index 3d68ed50d27..9149d04cf60 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -24,16 +24,16 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; /** * This {@link AllocationDecider} limits the number of shards per node on a per * index or node-wide basis. The allocator prevents a single node to hold more * than {@value #INDEX_TOTAL_SHARDS_PER_NODE} per index and - * {@value #CLUSTER_TOTAL_SHARDS_PER_NODE} globally during the allocation + * cluster.routing.allocation.total_shards_per_node globally during the allocation * process. The limits of this decider can be changed in real-time via a the * index settings API. *

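The ShardsLimitAllocationDecider hunk below uses the intSetting factory without a lower bound, unlike the recovery settings further down, because -1 is meaningful here: it keeps the node-wide limit disabled. A sketch of the declaration with the generic parameter restored:

    // -1 means no cluster-wide shard limit; any other value takes effect on the
    // next allocation round once the update consumer has swapped the field.
    public static final Setting<Integer> CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING =
            Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, true, Setting.Scope.CLUSTER);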
@@ -64,26 +64,18 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { * Controls the maximum number of shards per node on a global level. * Negative values are interpreted as unlimited. */ - public static final String CLUSTER_TOTAL_SHARDS_PER_NODE = "cluster.routing.allocation.total_shards_per_node"; + public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, true, Setting.Scope.CLUSTER); - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - Integer newClusterLimit = settings.getAsInt(CLUSTER_TOTAL_SHARDS_PER_NODE, null); - - if (newClusterLimit != null) { - logger.info("updating [{}] from [{}] to [{}]", CLUSTER_TOTAL_SHARDS_PER_NODE, - ShardsLimitAllocationDecider.this.clusterShardLimit, newClusterLimit); - ShardsLimitAllocationDecider.this.clusterShardLimit = newClusterLimit; - } - } - } @Inject - public ShardsLimitAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public ShardsLimitAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); - this.clusterShardLimit = settings.getAsInt(CLUSTER_TOTAL_SHARDS_PER_NODE, -1); - nodeSettingsService.addListener(new ApplySettings()); + this.clusterShardLimit = CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.get(settings); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, this::setClusterShardLimit); + } + + private void setClusterShardLimit(int clusterShardLimit) { + this.clusterShardLimit = clusterShardLimit; } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index 37b9f9f461b..cf889cde6ad 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -24,8 +24,9 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; /** * This {@link org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider} prevents shards that @@ -38,18 +39,7 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { /** * Disables relocation of shards that are currently being snapshotted. 
*/ - public static final String CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED = "cluster.routing.allocation.snapshot.relocation_enabled"; - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - boolean newEnableRelocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED, enableRelocation); - if (newEnableRelocation != enableRelocation) { - logger.info("updating [{}] from [{}], to [{}]", CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED, enableRelocation, newEnableRelocation); - enableRelocation = newEnableRelocation; - } - } - } + public static final Setting CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, true, Setting.Scope.CLUSTER); private volatile boolean enableRelocation = false; @@ -66,14 +56,18 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { * @param settings {@link org.elasticsearch.common.settings.Settings} to use */ public SnapshotInProgressAllocationDecider(Settings settings) { - this(settings, new NodeSettingsService(settings)); + this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); } @Inject - public SnapshotInProgressAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public SnapshotInProgressAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); - enableRelocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED, enableRelocation); - nodeSettingsService.addListener(new ApplySettings()); + enableRelocation = CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING.get(settings); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING, this::setEnableRelocation); + } + + private void setEnableRelocation(boolean enableRelocation) { + this.enableRelocation = enableRelocation; } /** diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index ed6814d83af..25f43f57610 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -21,11 +21,11 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; /** * {@link ThrottlingAllocationDecider} controls the recovery process per node in @@ -47,27 +47,43 @@ import org.elasticsearch.node.settings.NodeSettingsService; */ public class ThrottlingAllocationDecider extends AllocationDecider { - public static final String NAME = "throttling"; - - public static final String CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES = 
"cluster.routing.allocation.node_initial_primaries_recoveries"; - public static final String CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES = "cluster.routing.allocation.node_concurrent_recoveries"; - public static final String CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES = "cluster.routing.allocation.concurrent_recoveries"; - public static final int DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES = 2; public static final int DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES = 4; + public static final String NAME = "throttling"; + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 0, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_incoming_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_incoming_recoveries"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_outgoing_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_outgoing_recoveries"), true, Setting.Scope.CLUSTER); + private volatile int primariesInitialRecoveries; - private volatile int concurrentRecoveries; + private volatile int concurrentIncomingRecoveries; + private volatile int concurrentOutgoingRecoveries; + @Inject - public ThrottlingAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) { + public ThrottlingAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); + this.primariesInitialRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.get(settings); + concurrentIncomingRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.get(settings); + concurrentOutgoingRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.get(settings); - this.primariesInitialRecoveries = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES); - this.concurrentRecoveries = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES)); - logger.debug("using node_concurrent_recoveries [{}], node_initial_primaries_recoveries [{}]", concurrentRecoveries, primariesInitialRecoveries); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, this::setPrimariesInitialRecoveries); + 
clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, this::setConcurrentIncomingRecoveries); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, this::setConcurrentOutgoingRecoveries); - nodeSettingsService.addListener(new ApplySettings()); + logger.debug("using node_concurrent_outgoing_recoveries [{}], node_concurrent_incoming_recoveries [{}], node_initial_primaries_recoveries [{}]", concurrentOutgoingRecoveries, concurrentIncomingRecoveries, primariesInitialRecoveries); + } + + private void setConcurrentIncomingRecoveries(int concurrentIncomingRecoveries) { + this.concurrentIncomingRecoveries = concurrentIncomingRecoveries; + } + private void setConcurrentOutgoingRecoveries(int concurrentOutgoingRecoveries) { + this.concurrentOutgoingRecoveries = concurrentOutgoingRecoveries; + } + + private void setPrimariesInitialRecoveries(int primariesInitialRecoveries) { + this.primariesInitialRecoveries = primariesInitialRecoveries; } @Override @@ -93,7 +109,7 @@ public class ThrottlingAllocationDecider extends AllocationDecider { } } } - + // TODO should we allow shards not allocated post API to always allocate? // either primary or replica doing recovery (from peer shard) // count the number of recoveries on the node, it's for both target (INITIALIZING) and source (RELOCATING) @@ -102,34 +118,16 @@ @Override public Decision canAllocate(RoutingNode node, RoutingAllocation allocation) { - int currentRecoveries = 0; - for (ShardRouting shard : node) { - if (shard.initializing()) { - currentRecoveries++; - } - } - if (currentRecoveries >= concurrentRecoveries) { - return allocation.decision(Decision.THROTTLE, NAME, "too many shards currently recovering [%d], limit: [%d]", - currentRecoveries, concurrentRecoveries); - } else { - return allocation.decision(Decision.YES, NAME, "below shard recovery limit of [%d]", concurrentRecoveries); - } - } - - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - int primariesInitialRecoveries = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, ThrottlingAllocationDecider.this.primariesInitialRecoveries); - if (primariesInitialRecoveries != ThrottlingAllocationDecider.this.primariesInitialRecoveries) { - logger.info("updating [cluster.routing.allocation.node_initial_primaries_recoveries] from [{}] to [{}]", ThrottlingAllocationDecider.this.primariesInitialRecoveries, primariesInitialRecoveries); - ThrottlingAllocationDecider.this.primariesInitialRecoveries = primariesInitialRecoveries; - } - - int concurrentRecoveries = settings.getAsInt(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, ThrottlingAllocationDecider.this.concurrentRecoveries); - if (concurrentRecoveries != ThrottlingAllocationDecider.this.concurrentRecoveries) { - logger.info("updating [cluster.routing.allocation.node_concurrent_recoveries] from [{}] to [{}]", ThrottlingAllocationDecider.this.concurrentRecoveries, concurrentRecoveries); - ThrottlingAllocationDecider.this.concurrentRecoveries = concurrentRecoveries; - } + int currentOutRecoveries = allocation.routingNodes().getOutgoingRecoveries(node.nodeId()); + int currentInRecoveries = allocation.routingNodes().getIncomingRecoveries(node.nodeId()); + if (currentOutRecoveries >= concurrentOutgoingRecoveries) { + return
allocation.decision(Decision.THROTTLE, NAME, "too many outgoing shards currently recovering [%d], limit: [%d]", + currentOutRecoveries, concurrentOutgoingRecoveries); + } else if (currentInRecoveries >= concurrentIncomingRecoveries) { + return allocation.decision(Decision.THROTTLE, NAME, "too many incoming shards currently recovering [%d], limit: [%d]", + currentInRecoveries, concurrentIncomingRecoveries); + } else { + return allocation.decision(Decision.YES, NAME, "below shard recovery limit of outgoing: [%d] incoming: [%d]", concurrentOutgoingRecoveries, concurrentIncomingRecoveries); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index d4b15861846..ca135728b87 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -20,8 +20,19 @@ package org.elasticsearch.cluster.service; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.AckedClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState.Builder; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.MetaData; @@ -38,20 +49,40 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.*; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; +import org.elasticsearch.common.util.concurrent.PrioritizedRunnable; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryService; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.*; -import 
java.util.concurrent.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executor; +import java.util.concurrent.Future; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; @@ -62,8 +93,8 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF */ public class InternalClusterService extends AbstractLifecycleComponent implements ClusterService { - public static final String SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD = "cluster.service.slow_task_logging_threshold"; - public static final String SETTING_CLUSTER_SERVICE_RECONNECT_INTERVAL = "cluster.service.reconnect_interval"; + public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.service.reconnect_interval", TimeValue.timeValueSeconds(10), false, Setting.Scope.CLUSTER); public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; private final ThreadPool threadPool; @@ -74,7 +105,7 @@ public class InternalClusterService extends AbstractLifecycleComponent executor, final ClusterStateTaskListener listener ) { + innerSubmitStateUpdateTask(source, task, config, executor, safe(listener, logger)); + } + + private void innerSubmitStateUpdateTask(final String source, final T task, + final ClusterStateTaskConfig config, + final ClusterStateTaskExecutor executor, + final SafeClusterStateTaskListener listener) { if (!lifecycle.started()) { return; } @@ -292,6 +333,7 @@ public class InternalClusterService extends AbstractLifecycleComponent threadPool.generic().execute(() -> { if (updateTask.processed.getAndSet(true) == false) { + logger.debug("cluster state update task [{}] timed out after [{}]", source, config.timeout()); listener.onFailure(source, new ProcessClusterEventTimeoutException(config.timeout(), source)); }})); } else { @@ -327,7 +369,7 @@ public class InternalClusterService extends AbstractLifecycleComponent updateTask : toExecute) { + assert batchResult.executionResults.containsKey(updateTask.task) : "missing task result for [" + updateTask.task + "]"; + } + } ClusterState newClusterState = batchResult.resultingState; final ArrayList> proccessedListeners = new ArrayList<>(); @@ -421,7 +476,13 @@ public class InternalClusterService extends AbstractLifecycleComponent proccessedListeners.add(updateTask), ex -> updateTask.listener.onFailure(updateTask.source, ex)); + executionResult.handle( + () -> proccessedListeners.add(updateTask), + ex -> { + logger.debug("cluster state update task [{}] failed", ex, updateTask.source); + updateTask.listener.onFailure(updateTask.source, ex); + } + ); } if (previousClusterState == newClusterState) { @@ -521,6 +582,15 @@ public class InternalClusterService extends AbstractLifecycleComponent extends SourcePrioritizedRunnable { public final T task; @@ -846,12 +1007,4 @@ public class InternalClusterService extends AbstractLifecycleComponent { } public static final int NUMBER_OF_SEGMENTS = 256; 
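    // Aside on the Cache.java hunk just below: Java cannot instantiate a generic
    // array ('new CacheSegment<K, V>[n]' does not compile), so the field is built
    // as a raw array and the unchecked warning suppressed. Illustrative,
    // self-contained version of the idiom; Segmented/Segment are stand-in names,
    // not the patch's types.
    class Segmented<K, V> {
        static final int NUMBER_OF_SEGMENTS = 256;

        static class Segment<K, V> {
            // per-segment state would live here
        }

        // Raw creation plus @SuppressWarnings is the standard workaround for
        // generic array creation; the assignment is safe because the array is
        // private and only ever holds Segment<K, V> instances.
        @SuppressWarnings("unchecked")
        private final Segment<K, V>[] segments = new Segment[NUMBER_OF_SEGMENTS];
    }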
- private final CacheSegment[] segments = new CacheSegment[NUMBER_OF_SEGMENTS]; + @SuppressWarnings("unchecked") private final CacheSegment[] segments = new CacheSegment[NUMBER_OF_SEGMENTS]; { for (int i = 0; i < segments.length; i++) { @@ -428,7 +432,7 @@ public class Cache { promote(tuple.v1(), now); } if (replaced) { - removalListener.onRemoval(new RemovalNotification(tuple.v2().key, tuple.v2().value, RemovalNotification.RemovalReason.REPLACED)); + removalListener.onRemoval(new RemovalNotification<>(tuple.v2().key, tuple.v2().value, RemovalNotification.RemovalReason.REPLACED)); } } diff --git a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java index 9523115b024..82898b3e457 100644 --- a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java +++ b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java @@ -22,7 +22,11 @@ package org.elasticsearch.common.cli; import org.apache.commons.cli.CommandLine; import org.elasticsearch.common.SuppressForbidden; -import java.io.*; +import java.io.BufferedReader; +import java.io.Console; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.PrintWriter; import java.util.Locale; /** diff --git a/core/src/main/java/org/elasticsearch/common/collect/CopyOnWriteHashMap.java b/core/src/main/java/org/elasticsearch/common/collect/CopyOnWriteHashMap.java index c0114480498..3c0579c87e1 100644 --- a/core/src/main/java/org/elasticsearch/common/collect/CopyOnWriteHashMap.java +++ b/core/src/main/java/org/elasticsearch/common/collect/CopyOnWriteHashMap.java @@ -18,10 +18,20 @@ */ package org.elasticsearch.common.collect; + import org.apache.lucene.util.mutable.MutableValueInt; import java.lang.reflect.Array; -import java.util.*; +import java.util.AbstractMap; +import java.util.AbstractSet; +import java.util.ArrayDeque; +import java.util.Arrays; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; import java.util.stream.Stream; /** diff --git a/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java b/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java index b807d48a971..0b53614723b 100644 --- a/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java +++ b/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java @@ -19,7 +19,13 @@ package org.elasticsearch.common.collect; -import com.carrotsearch.hppc.*; +import com.carrotsearch.hppc.IntCollection; +import com.carrotsearch.hppc.IntContainer; +import com.carrotsearch.hppc.IntLookupContainer; +import com.carrotsearch.hppc.IntObjectAssociativeContainer; +import com.carrotsearch.hppc.IntObjectHashMap; +import com.carrotsearch.hppc.IntObjectMap; +import com.carrotsearch.hppc.ObjectContainer; import com.carrotsearch.hppc.cursors.IntCursor; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; diff --git a/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java b/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java index 47c1bdfb826..c5210b1c2dc 100644 --- a/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java +++ b/core/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java @@ -19,7 +19,12 @@ package org.elasticsearch.common.collect; -import com.carrotsearch.hppc.*; +import 
com.carrotsearch.hppc.ObjectCollection; +import com.carrotsearch.hppc.ObjectContainer; +import com.carrotsearch.hppc.ObjectLookupContainer; +import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; +import com.carrotsearch.hppc.ObjectObjectHashMap; +import com.carrotsearch.hppc.ObjectObjectMap; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.carrotsearch.hppc.predicates.ObjectObjectPredicate; @@ -241,8 +246,8 @@ public final class ImmutableOpenMap implements Iterable(map); } - - + + /** * Puts all the entries in the map to the builder. */ diff --git a/core/src/main/java/org/elasticsearch/common/component/LifecycleComponent.java b/core/src/main/java/org/elasticsearch/common/component/LifecycleComponent.java index 452f644462f..2933bffa630 100644 --- a/core/src/main/java/org/elasticsearch/common/component/LifecycleComponent.java +++ b/core/src/main/java/org/elasticsearch/common/component/LifecycleComponent.java @@ -19,11 +19,8 @@ package org.elasticsearch.common.component; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lease.Releasable; -import java.io.Closeable; - /** * */ diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java b/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java index 2a31596eab9..69a7d1f3e57 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; + import java.io.IOException; import java.util.Locale; diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index 7130537fceb..513a7977d67 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -146,7 +146,7 @@ public final class GeoPoint { @Override public String toString() { - return "[" + lat + ", " + lon + "]"; + return lat + ", " + lon; } public static GeoPoint parseFromLatLon(String latLon) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index 83a8adab1e0..cec805e7a80 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -21,9 +21,9 @@ package org.elasticsearch.common.geo; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.apache.lucene.util.GeoDistanceUtils; import org.apache.lucene.util.SloppyMath; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Numbers; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; @@ -66,19 +66,11 @@ public class GeoUtils { /** Earth ellipsoid polar distance in meters */ public static final double EARTH_POLAR_DISTANCE = Math.PI * EARTH_SEMI_MINOR_AXIS; - /** Returns the maximum distance/radius from the point 'center' before overlapping */ - public static double maxRadialDistance(GeoPoint 
center) { - if (Math.abs(center.lat()) == 90.0) { - return SloppyMath.haversin(center.lat(), center.lon(), 0, center.lon())*1000.0; - } - return SloppyMath.haversin(center.lat(), center.lon(), center.lat(), (180.0 + center.lon()) % 360)*1000.0; - } - /** Returns the minimum between the provided distance 'initialRadius' and the * maximum distance/radius from the point 'center' before overlapping **/ public static double maxRadialDistance(GeoPoint center, double initialRadius) { - final double maxRadius = maxRadialDistance(center); + final double maxRadius = GeoDistanceUtils.maxRadialDistanceMeters(center.lon(), center.lat()); return Math.min(initialRadius, maxRadius); } @@ -107,53 +99,53 @@ public class GeoUtils { } /** - * Calculate the width (in meters) of geohash cells at a specific level - * @param level geohash level must be greater or equal to zero - * @return the width of cells at level in meters + * Calculate the width (in meters) of geohash cells at a specific level + * @param level geohash level must be greater or equal to zero + * @return the width of cells at level in meters */ public static double geoHashCellWidth(int level) { assert level>=0; // Geohash cells are split into 32 cells at each level. the grid - // alternates at each level between a 8x4 and a 4x8 grid + // alternates at each level between a 8x4 and a 4x8 grid return EARTH_EQUATOR / (1L<<((((level+1)/2)*3) + ((level/2)*2))); } /** - * Calculate the width (in meters) of quadtree cells at a specific level - * @param level quadtree level must be greater or equal to zero - * @return the width of cells at level in meters + * Calculate the width (in meters) of quadtree cells at a specific level + * @param level quadtree level must be greater or equal to zero + * @return the width of cells at level in meters */ public static double quadTreeCellWidth(int level) { assert level >=0; return EARTH_EQUATOR / (1L<=0; // Geohash cells are split into 32 cells at each level. the grid - // alternates at each level between a 8x4 and a 4x8 grid + // alternates at each level between a 8x4 and a 4x8 grid return EARTH_POLAR_DISTANCE / (1L<<((((level+1)/2)*2) + ((level/2)*3))); } - + /** - * Calculate the height (in meters) of quadtree cells at a specific level - * @param level quadtree level must be greater or equal to zero - * @return the height of cells at level in meters + * Calculate the height (in meters) of quadtree cells at a specific level + * @param level quadtree level must be greater or equal to zero + * @return the height of cells at level in meters */ public static double quadTreeCellHeight(int level) { assert level>=0; return EARTH_POLAR_DISTANCE / (1L<=0; @@ -163,20 +155,20 @@ public class GeoUtils { } /** - * Calculate the size (in meters) of quadtree cells at a specific level - * @param level quadtree level must be greater or equal to zero - * @return the size of cells at level in meters + * Calculate the size (in meters) of quadtree cells at a specific level + * @param level quadtree level must be greater or equal to zero + * @return the size of cells at level in meters */ public static double quadTreeCellSize(int level) { assert level>=0; return Math.sqrt(EARTH_POLAR_DISTANCE*EARTH_POLAR_DISTANCE + EARTH_EQUATOR*EARTH_EQUATOR) / (1L<= 0; @@ -195,7 +187,7 @@ public class GeoUtils { * Calculate the number of levels needed for a specific precision. QuadTree * cells will not exceed the specified size (diagonal) of the precision. 
* @param distance Maximum size of cells as unit string (must greater or equal to zero) - * @return levels need to achieve precision + * @return levels need to achieve precision */ public static int quadTreeLevelsForPrecision(String distance) { return quadTreeLevelsForPrecision(DistanceUnit.METERS.parse(distance, DistanceUnit.DEFAULT)); @@ -205,11 +197,11 @@ public class GeoUtils { * Calculate the number of levels needed for a specific precision. GeoHash * cells will not exceed the specified size (diagonal) of the precision. * @param meters Maximum size of cells in meters (must greater or equal to zero) - * @return levels need to achieve precision + * @return levels need to achieve precision */ public static int geoHashLevelsForPrecision(double meters) { assert meters >= 0; - + if(meters == 0) { return GeohashPrefixTree.getMaxLevelsPossible(); } else { @@ -219,19 +211,19 @@ public class GeoUtils { if(part == 1) return 1; final int bits = (int)Math.round(Math.ceil(Math.log(part) / Math.log(2))); - final int full = bits / 5; // number of 5 bit subdivisions + final int full = bits / 5; // number of 5 bit subdivisions final int left = bits - full*5; // bit representing the last level final int even = full + (left>0?1:0); // number of even levels final int odd = full + (left>3?1:0); // number of odd levels return even+odd; } } - + /** * Calculate the number of levels needed for a specific precision. GeoHash * cells will not exceed the specified size (diagonal) of the precision. * @param distance Maximum size of cells as unit string (must greater or equal to zero) - * @return levels need to achieve precision + * @return levels need to achieve precision */ public static int geoHashLevelsForPrecision(String distance) { return geoHashLevelsForPrecision(DistanceUnit.METERS.parse(distance, DistanceUnit.DEFAULT)); @@ -355,7 +347,7 @@ public class GeoUtils { } /** * Parse a {@link GeoPoint} with a {@link XContentParser}: - * + * * @param parser {@link XContentParser} to parse the value from * @return new {@link GeoPoint} parsed from the parse */ @@ -365,14 +357,14 @@ public class GeoUtils { /** * Parse a {@link GeoPoint} with a {@link XContentParser}. A geopoint has one of the following forms: - * + * *

     * <ul>
     *     <li>Object: {"lat": <latitude>, "lon": <longitude>}</li>
     *     <li>String: "<latitude>,<longitude>"</li>
     *     <li>Geohash: "<geohash>"</li>
     *     <li>Array: [<longitude>,<latitude>]</li>
     * </ul>
- * + * * @param parser {@link XContentParser} to parse the value from * @param point A {@link GeoPoint} that will be reset by the values parsed * @return new {@link GeoPoint} parsed from the parse @@ -381,11 +373,11 @@ public class GeoUtils { double lat = Double.NaN; double lon = Double.NaN; String geohash = null; - + if(parser.currentToken() == Token.START_OBJECT) { while(parser.nextToken() != Token.END_OBJECT) { if(parser.currentToken() == Token.FIELD_NAME) { - String field = parser.text(); + String field = parser.currentName(); if(LATITUDE.equals(field)) { parser.nextToken(); switch (parser.currentToken()) { @@ -433,7 +425,7 @@ public class GeoUtils { } else { return point.reset(lat, lon); } - + } else if(parser.currentToken() == Token.START_ARRAY) { int element = 0; while(parser.nextToken() != Token.END_ARRAY) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java index 64c657c8b6f..42650275b4b 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java +++ b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java @@ -20,11 +20,9 @@ package org.elasticsearch.common.geo; import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Rectangle; import com.spatial4j.core.shape.Shape; import com.spatial4j.core.shape.ShapeCollection; -import java.util.Collection; import java.util.List; /** diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index 5f11d12a4bf..eb77ef7a46a 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Circle; import com.vividsolutions.jts.geom.Coordinate; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.DistanceUnit; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 62f29d2bad7..71b68207e74 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -21,30 +21,21 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Locale; import java.util.Objects; public class EnvelopeBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; + public static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(); - protected Coordinate topLeft; - protected Coordinate bottomRight; - - public EnvelopeBuilder() { - this(Orientation.RIGHT); - } - - public EnvelopeBuilder(Orientation orientation) { - super(orientation); - } + private Coordinate topLeft; + private Coordinate bottomRight; public EnvelopeBuilder topLeft(Coordinate topLeft) { this.topLeft = topLeft; @@ -55,6 +46,10 @@ public class EnvelopeBuilder extends ShapeBuilder { 
return topLeft(coordinate(longitude, latitude)); } + public Coordinate topLeft() { + return this.topLeft; + } + public EnvelopeBuilder bottomRight(Coordinate bottomRight) { this.bottomRight = bottomRight; return this; @@ -64,11 +59,14 @@ public class EnvelopeBuilder extends ShapeBuilder { return bottomRight(coordinate(longitude, latitude)); } + public Coordinate bottomRight() { + return this.bottomRight; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_COORDINATES); toXContent(builder, topLeft); toXContent(builder, bottomRight); @@ -88,7 +86,7 @@ public class EnvelopeBuilder extends ShapeBuilder { @Override public int hashCode() { - return Objects.hash(orientation, topLeft, bottomRight); + return Objects.hash(topLeft, bottomRight); } @Override @@ -100,22 +98,19 @@ public class EnvelopeBuilder extends ShapeBuilder { return false; } EnvelopeBuilder other = (EnvelopeBuilder) obj; - return Objects.equals(orientation, other.orientation) && - Objects.equals(topLeft, other.topLeft) && + return Objects.equals(topLeft, other.topLeft) && Objects.equals(bottomRight, other.bottomRight); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(orientation == Orientation.RIGHT); writeCoordinateTo(topLeft, out); writeCoordinateTo(bottomRight, out); } @Override public EnvelopeBuilder readFrom(StreamInput in) throws IOException { - Orientation orientation = in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; - return new EnvelopeBuilder(orientation) + return new EnvelopeBuilder() .topLeft(readCoordinateFrom(in)) .bottomRight(readCoordinateFrom(in)); } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 45397ed962f..5fc6b58e176 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -20,28 +20,25 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Shape; -import org.elasticsearch.common.geo.XShapeCollection; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; public class GeometryCollectionBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION; + public static final GeometryCollectionBuilder PROTOTYPE = new GeometryCollectionBuilder(); + protected final ArrayList shapes = new ArrayList<>(); - public GeometryCollectionBuilder() { - this(Orientation.RIGHT); - } - - public GeometryCollectionBuilder(Orientation orientation) { - super(orientation); - } - public GeometryCollectionBuilder shape(ShapeBuilder shape) { this.shapes.add(shape); return this; @@ -132,4 +129,39 @@ public class GeometryCollectionBuilder extends ShapeBuilder { //note: ShapeCollection is probably faster than a Multi* geom. 
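    // Aside: EnvelopeBuilder above and the remaining shape builders below all
    // adopt the same wire pattern in this commit - a static PROTOTYPE constant
    // whose instance-level readFrom(in) returns a fresh builder parsed from the
    // stream, so callers can write LineStringBuilder.PROTOTYPE.readFrom(in)
    // without reflection. Self-contained sketch; PointSketch and the java.io
    // streams are stand-ins, not types from this patch.
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class PointSketch {
        static final PointSketch PROTOTYPE = new PointSketch();

        private double x;
        private double y;

        PointSketch point(double x, double y) {
            this.x = x;
            this.y = y;
            return this;
        }

        void writeTo(DataOutputStream out) throws IOException {
            out.writeDouble(x);
            out.writeDouble(y);
        }

        // An instance method (not static) so it can be dispatched through
        // PROTOTYPE, mirroring the builders in this commit.
        PointSketch readFrom(DataInputStream in) throws IOException {
            return new PointSketch().point(in.readDouble(), in.readDouble());
        }
    }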
} + @Override + public int hashCode() { + return Objects.hash(shapes); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + GeometryCollectionBuilder other = (GeometryCollectionBuilder) obj; + return Objects.equals(shapes, other.shapes); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(shapes.size()); + for (ShapeBuilder shape : shapes) { + out.writeShape(shape); + } + } + + @Override + public GeometryCollectionBuilder readFrom(StreamInput in) throws IOException { + GeometryCollectionBuilder geometryCollectionBuilder = new GeometryCollectionBuilder(); + int shapes = in.readVInt(); + for (int i = 0; i < shapes; i++) { + geometryCollectionBuilder.shape(in.readShape()); + } + return geometryCollectionBuilder; + } + } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index c7ba9b72f55..0bf1ed8fa09 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -19,21 +19,26 @@ package org.elasticsearch.common.geo.builders; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; - -import org.elasticsearch.common.xcontent.XContentBuilder; import com.spatial4j.core.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.LineString; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Objects; public class LineStringBuilder extends PointCollection { public static final GeoShapeType TYPE = GeoShapeType.LINESTRING; + public static final LineStringBuilder PROTOTYPE = new LineStringBuilder(); + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -139,4 +144,39 @@ public class LineStringBuilder extends PointCollection { } return coordinates; } + + @Override + public int hashCode() { + return Objects.hash(points); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + LineStringBuilder other = (LineStringBuilder) obj; + return Objects.equals(points, other.points); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(points.size()); + for (Coordinate point : points) { + writeCoordinateTo(point, out); + } + } + + @Override + public LineStringBuilder readFrom(StreamInput in) throws IOException { + LineStringBuilder lineStringBuilder = new LineStringBuilder(); + int size = in.readVInt(); + for (int i=0; i < size; i++) { + lineStringBuilder.point(readCoordinateFrom(in)); + } + return lineStringBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index a004b90a2dc..be09ae81836 100644 --- 
a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -19,21 +19,25 @@ package org.elasticsearch.common.geo.builders; -import org.elasticsearch.common.xcontent.XContentBuilder; - import com.spatial4j.core.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.LineString; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; +import java.util.Objects; public class MultiLineStringBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING; + public static final MultiLineStringBuilder PROTOTYPE = new MultiLineStringBuilder(); + private final ArrayList lines = new ArrayList<>(); public MultiLineStringBuilder linestring(LineStringBuilder line) { @@ -41,6 +45,10 @@ public class MultiLineStringBuilder extends ShapeBuilder { return this; } + public MultiLineStringBuilder linestring(Coordinate[] coordinates) { + return this.linestring(new LineStringBuilder().points(coordinates)); + } + public Coordinate[][] coordinates() { Coordinate[][] result = new Coordinate[lines.size()][]; for (int i = 0; i < result.length; i++) { @@ -92,4 +100,39 @@ public class MultiLineStringBuilder extends ShapeBuilder { } return jtsGeometry(geometry); } + + @Override + public int hashCode() { + return Objects.hash(lines); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + MultiLineStringBuilder other = (MultiLineStringBuilder) obj; + return Objects.equals(lines, other.lines); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(lines.size()); + for (LineStringBuilder line : lines) { + line.writeTo(out); + } + } + + @Override + public MultiLineStringBuilder readFrom(StreamInput in) throws IOException { + MultiLineStringBuilder multiLineStringBuilder = new MultiLineStringBuilder(); + int size = in.readVInt(); + for (int i = 0; i < size; i++) { + multiLineStringBuilder.linestring(LineStringBuilder.PROTOTYPE.readFrom(in)); + } + return multiLineStringBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index 8d5cfabdabb..b0e86a819aa 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -23,17 +23,21 @@ import com.spatial4j.core.shape.Point; import com.spatial4j.core.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; public class MultiPointBuilder extends PointCollection { - public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT; + public final static MultiPointBuilder PROTOTYPE = new MultiPointBuilder(); + @Override 
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -52,7 +56,7 @@ public class MultiPointBuilder extends PointCollection { for (Coordinate coord : points) { shapes.add(SPATIAL_CONTEXT.makePoint(coord.x, coord.y)); } - XShapeCollection multiPoints = new XShapeCollection<>(shapes, SPATIAL_CONTEXT); + XShapeCollection multiPoints = new XShapeCollection<>(shapes, SPATIAL_CONTEXT); multiPoints.setPointsOnly(true); return multiPoints; } @@ -61,4 +65,39 @@ public class MultiPointBuilder extends PointCollection { public GeoShapeType type() { return TYPE; } + + @Override + public int hashCode() { + return Objects.hash(points); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + MultiPointBuilder other = (MultiPointBuilder) obj; + return Objects.equals(points, other.points); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(points.size()); + for (Coordinate point : points) { + writeCoordinateTo(point, out); + } + } + + @Override + public MultiPointBuilder readFrom(StreamInput in) throws IOException { + MultiPointBuilder multiPointBuilder = new MultiPointBuilder(); + int size = in.readVInt(); + for (int i=0; i < size; i++) { + multiPointBuilder.point(readCoordinateFrom(in)); + } + return multiPointBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index e7762e51b61..cff06dbfe59 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -19,39 +19,66 @@ package org.elasticsearch.common.geo.builders; +import com.spatial4j.core.shape.Shape; +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; + import java.io.IOException; import java.util.ArrayList; import java.util.List; - -import org.elasticsearch.common.geo.XShapeCollection; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import com.spatial4j.core.shape.Shape; -import com.vividsolutions.jts.geom.Coordinate; +import java.util.Locale; +import java.util.Objects; public class MultiPolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON; + public static final MultiPolygonBuilder PROTOTYPE = new MultiPolygonBuilder(); - protected final ArrayList polygons = new ArrayList<>(); + private final ArrayList polygons = new ArrayList<>(); + + private Orientation orientation = Orientation.RIGHT; public MultiPolygonBuilder() { this(Orientation.RIGHT); } public MultiPolygonBuilder(Orientation orientation) { - super(orientation); + this.orientation = orientation; } + public Orientation orientation() { + return this.orientation; + } + + /** + * Add a shallow copy of the polygon to the multipolygon. This will apply the orientation of the + * {@link MultiPolygonBuilder} to the polygon if polygon has different orientation. 
+ */ public MultiPolygonBuilder polygon(PolygonBuilder polygon) { - this.polygons.add(polygon); + PolygonBuilder pb = new PolygonBuilder(this.orientation); + pb.points(polygon.shell().coordinates(false)); + for (LineStringBuilder hole : polygon.holes()) { + pb.hole(hole); + } + this.polygons.add(pb); return this; } + /** + * get the list of polygons + */ + public ArrayList polygons() { + return polygons; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FIELD_TYPE, TYPE.shapeName()); + builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_COORDINATES); for(PolygonBuilder polygon : polygons) { builder.startArray(); @@ -89,4 +116,41 @@ public class MultiPolygonBuilder extends ShapeBuilder { return new XShapeCollection<>(shapes, SPATIAL_CONTEXT); //note: ShapeCollection is probably faster than a Multi* geom. } + + @Override + public int hashCode() { + return Objects.hash(polygons, orientation); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + MultiPolygonBuilder other = (MultiPolygonBuilder) obj; + return Objects.equals(polygons, other.polygons) && + Objects.equals(orientation, other.orientation); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + orientation.writeTo(out); + out.writeVInt(polygons.size()); + for (PolygonBuilder polygon : polygons) { + polygon.writeTo(out); + } + } + + @Override + public MultiPolygonBuilder readFrom(StreamInput in) throws IOException { + MultiPolygonBuilder polyBuilder = new MultiPolygonBuilder(Orientation.readFrom(in)); + int holes = in.readVInt(); + for (int i = 0; i < holes; i++) { + polyBuilder.polygon(PolygonBuilder.PROTOTYPE.readFrom(in)); + } + return polyBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index d6d62c28b8c..afb713cb09d 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -19,20 +19,18 @@ package org.elasticsearch.common.geo.builders; -import java.io.IOException; -import java.util.Objects; - +import com.spatial4j.core.shape.Point; +import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import com.spatial4j.core.shape.Point; -import com.vividsolutions.jts.geom.Coordinate; +import java.io.IOException; +import java.util.Objects; public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; - public static final PointBuilder PROTOTYPE = new PointBuilder(); private Coordinate coordinate; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointCollection.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointCollection.java index 45ce5adb595..b48aacd857b 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointCollection.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointCollection.java @@ -19,15 +19,14 @@ package org.elasticsearch.common.geo.builders; +import com.vividsolutions.jts.geom.Coordinate; +import 
org.elasticsearch.common.xcontent.XContentBuilder; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import com.vividsolutions.jts.geom.Coordinate; - /** * The {@link PointCollection} is an abstract base implementation for all GeoShapes. It simply handles a set of points. */ diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 04540df27e9..026fc9aa170 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -27,8 +27,9 @@ import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.LinearRing; import com.vividsolutions.jts.geom.MultiPolygon; import com.vividsolutions.jts.geom.Polygon; - import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -38,6 +39,9 @@ import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; /** @@ -48,6 +52,11 @@ import java.util.concurrent.atomic.AtomicBoolean; public class PolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POLYGON; + public static final PolygonBuilder PROTOTYPE = new PolygonBuilder(); + + private static final Coordinate[][] EMPTY = new Coordinate[0][]; + + private Orientation orientation = Orientation.RIGHT; // line string defining the shell of the polygon private LineStringBuilder shell; @@ -56,7 +65,7 @@ public class PolygonBuilder extends ShapeBuilder { private final ArrayList holes = new ArrayList<>(); public PolygonBuilder() { - this(new ArrayList(), Orientation.RIGHT); + this(Orientation.RIGHT); } public PolygonBuilder(Orientation orientation) { @@ -64,10 +73,14 @@ public class PolygonBuilder extends ShapeBuilder { } public PolygonBuilder(ArrayList points, Orientation orientation) { - super(orientation); + this.orientation = orientation; this.shell = new LineStringBuilder().points(points); } + public Orientation orientation() { + return this.orientation; + } + public PolygonBuilder point(double longitude, double latitude) { shell.point(longitude, latitude); return this; @@ -103,6 +116,20 @@ public class PolygonBuilder extends ShapeBuilder { return this; } + /** + * @return the list of holes defined for this polygon + */ + public List holes() { + return this.holes; + } + + /** + * @return the list of points of the shell for this polygon + */ + public LineStringBuilder shell() { + return this.shell; + } + /** * Close the shell of the polygon */ @@ -175,6 +202,7 @@ public class PolygonBuilder extends ShapeBuilder { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FIELD_TYPE, TYPE.shapeName()); + builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_COORDINATES); coordinatesArray(builder, params); builder.endArray(); @@ -357,8 +385,6 @@ public class PolygonBuilder extends ShapeBuilder { return result; } - private 
static final Coordinate[][] EMPTY = new Coordinate[0][]; - private static Coordinate[][] holes(Edge[] holes, int numHoles) { if (numHoles == 0) { return EMPTY; @@ -663,4 +689,44 @@ public class PolygonBuilder extends ShapeBuilder { } } } + + @Override + public int hashCode() { + return Objects.hash(shell, holes, orientation); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + PolygonBuilder other = (PolygonBuilder) obj; + return Objects.equals(shell, other.shell) && + Objects.equals(holes, other.holes) && + Objects.equals(orientation, other.orientation); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + orientation.writeTo(out); + shell.writeTo(out); + out.writeVInt(holes.size()); + for (LineStringBuilder hole : holes) { + hole.writeTo(out); + } + } + + @Override + public PolygonBuilder readFrom(StreamInput in) throws IOException { + PolygonBuilder polyBuilder = new PolygonBuilder(Orientation.readFrom(in)); + polyBuilder.shell = LineStringBuilder.PROTOTYPE.readFrom(in); + int holes = in.readVInt(); + for (int i = 0; i < holes; i++) { + polyBuilder.hole(LineStringBuilder.PROTOTYPE.readFrom(in)); + } + return polyBuilder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index d8689ee737f..d286237e547 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -26,7 +26,6 @@ import com.spatial4j.core.shape.jts.JtsGeometry; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; - import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.io.stream.NamedWriteable; @@ -42,7 +41,11 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Locale; /** * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc @@ -77,16 +80,10 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri /** @see com.spatial4j.core.shape.jts.JtsGeometry#index() */ protected final boolean autoIndexJtsGeometry = true;//may want to turn off once SpatialStrategy impls do it. 
- protected Orientation orientation = Orientation.RIGHT; - protected ShapeBuilder() { } - protected ShapeBuilder(Orientation orientation) { - this.orientation = orientation; - } - protected static Coordinate coordinate(double longitude, double latitude) { return new Coordinate(longitude, latitude); } @@ -186,22 +183,6 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri return new Coordinate(in.readDouble(), in.readDouble()); } - public static Orientation orientationFromString(String orientation) { - orientation = orientation.toLowerCase(Locale.ROOT); - switch (orientation) { - case "right": - case "counterclockwise": - case "ccw": - return Orientation.RIGHT; - case "left": - case "clockwise": - case "cw": - return Orientation.LEFT; - default: - throw new IllegalArgumentException("Unknown orientation [" + orientation + "]"); - } - } - protected static Coordinate shift(Coordinate coordinate, double dateline) { if (dateline == 0) { return coordinate; @@ -408,6 +389,30 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri public static final Orientation COUNTER_CLOCKWISE = Orientation.RIGHT; public static final Orientation CW = Orientation.LEFT; public static final Orientation CCW = Orientation.RIGHT; + + public void writeTo (StreamOutput out) throws IOException { + out.writeBoolean(this == Orientation.RIGHT); + } + + public static Orientation readFrom (StreamInput in) throws IOException { + return in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; + } + + public static Orientation fromString(String orientation) { + orientation = orientation.toLowerCase(Locale.ROOT); + switch (orientation) { + case "right": + case "counterclockwise": + case "ccw": + return Orientation.RIGHT; + case "left": + case "clockwise": + case "cw": + return Orientation.LEFT; + default: + throw new IllegalArgumentException("Unknown orientation [" + orientation + "]"); + } + } } public static final String FIELD_TYPE = "type"; @@ -498,7 +503,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri radius = Distance.parseDistance(parser.text()); } else if (FIELD_ORIENTATION.equals(fieldName)) { parser.nextToken(); - requestedOrientation = orientationFromString(parser.text()); + requestedOrientation = Orientation.fromString(parser.text()); } else { parser.nextToken(); parser.skipChildren(); @@ -524,7 +529,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri case POLYGON: return parsePolygon(node, requestedOrientation, coerce); case MULTIPOLYGON: return parseMultiPolygon(node, requestedOrientation, coerce); case CIRCLE: return parseCircle(node, radius); - case ENVELOPE: return parseEnvelope(node, requestedOrientation); + case ENVELOPE: return parseEnvelope(node); case GEOMETRYCOLLECTION: return geometryCollections; default: throw new ElasticsearchParseException("shape type [{}] not included", shapeType); @@ -550,7 +555,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri return ShapeBuilders.newCircleBuilder().center(coordinates.coordinate).radius(radius); } - protected static EnvelopeBuilder parseEnvelope(CoordinateNode coordinates, final Orientation orientation) { + protected static EnvelopeBuilder parseEnvelope(CoordinateNode coordinates) { // validate the coordinate array for envelope type if (coordinates.children.size() != 2) { throw new ElasticsearchParseException("invalid number of points [{}] provided for " + @@ -564,7 +569,7 @@ public abstract class 
ShapeBuilder extends ToXContentToBytes implements NamedWri uL = new Coordinate(Math.min(uL.x, lR.x), Math.max(uL.y, lR.y)); lR = new Coordinate(Math.max(uLtmp.x, lR.x), Math.min(uLtmp.y, lR.y)); } - return ShapeBuilders.newEnvelope(orientation).topLeft(uL).bottomRight(lR); + return ShapeBuilders.newEnvelope().topLeft(uL).bottomRight(lR); } protected static void validateMultiPointNode(CoordinateNode coordinates) { @@ -684,8 +689,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri } XContentParser.Token token = parser.nextToken(); - GeometryCollectionBuilder geometryCollection = ShapeBuilders.newGeometryCollection( (mapper == null) ? Orientation.RIGHT : mapper - .fieldType().orientation()); + GeometryCollectionBuilder geometryCollection = ShapeBuilders.newGeometryCollection(); while (token != XContentParser.Token.END_ARRAY) { ShapeBuilder shapeBuilder = GeoShapeType.parse(parser); geometryCollection.shape(shapeBuilder); @@ -700,15 +704,4 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri public String getWriteableName() { return type().shapeName(); } - - // NORELEASE this should be deleted as soon as all shape builders implement writable - @Override - public void writeTo(StreamOutput out) throws IOException { - } - - // NORELEASE this should be deleted as soon as all shape builders implement writable - @Override - public ShapeBuilder readFrom(StreamInput in) throws IOException { - return null; - } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java new file mode 100644 index 00000000000..c66e969aa3a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
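The registration that follows pairs each builder's PROTOTYPE with the name it writes on the wire: writeNamedWriteable emits getWriteableName() followed by the payload, and the registry resolves that name back to the prototype whose readFrom decodes the payload. A rough illustration of the dispatch; SimpleShapeRegistry is hypothetical and only stands in for the real NamedWriteableRegistry internals:

---------------------------------------------------------------------------
// Illustration only: prototype-based named-writeable dispatch.
class SimpleShapeRegistry {
    private final java.util.Map<String, ShapeBuilder> prototypes = new java.util.HashMap<>();

    void register(ShapeBuilder prototype) {
        prototypes.put(prototype.getWriteableName(), prototype);
    }

    ShapeBuilder read(StreamInput in) throws java.io.IOException {
        String name = in.readString();            // name written by writeNamedWriteable
        return prototypes.get(name).readFrom(in); // prototype decodes the payload
    }
}
---------------------------------------------------------------------------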
+ */ + +package org.elasticsearch.common.geo.builders; + +import org.elasticsearch.common.geo.ShapesAvailability; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +/** + * Register the shape builder prototypes with the {@link NamedWriteableRegistry} + */ +public class ShapeBuilderRegistry { + + @Inject + public ShapeBuilderRegistry(NamedWriteableRegistry namedWriteableRegistry) { + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java index e294a9d6ef7..61d7a9cd07e 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java @@ -110,15 +110,6 @@ public class ShapeBuilders { return new GeometryCollectionBuilder(); } - /** - * Create a new GeometryCollection - * - * @return a new {@link GeometryCollectionBuilder} - */ - public static GeometryCollectionBuilder newGeometryCollection(ShapeBuilder.Orientation orientation) { - return new GeometryCollectionBuilder(orientation); - } - /** * create a new Circle * @@ -136,13 +127,4 @@ public class ShapeBuilders { public static EnvelopeBuilder newEnvelope() { return new EnvelopeBuilder(); } - - /** - * create a new rectangle - * - * @return a new {@link EnvelopeBuilder} - */ - public static EnvelopeBuilder newEnvelope(ShapeBuilder.Orientation orientation) { - return new EnvelopeBuilder(orientation); - } } diff --git a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java index 01079ecf45a..b99ef895430 100644 --- a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java +++ b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java @@ -30,7 +30,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.unit.TimeValue; -import java.io.*; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.PrintWriter; import java.net.HttpURLConnection; import java.net.URL; import java.net.URLConnection; diff --git a/core/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java b/core/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java index 154ce88b245..8b501a56129 100644 --- 
a/core/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java +++ b/core/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java @@ -17,7 +17,17 @@ package org.elasticsearch.common.inject; import org.elasticsearch.common.inject.internal.Errors; -import org.elasticsearch.common.inject.spi.*; +import org.elasticsearch.common.inject.spi.Element; +import org.elasticsearch.common.inject.spi.ElementVisitor; +import org.elasticsearch.common.inject.spi.InjectionRequest; +import org.elasticsearch.common.inject.spi.MembersInjectorLookup; +import org.elasticsearch.common.inject.spi.Message; +import org.elasticsearch.common.inject.spi.PrivateElements; +import org.elasticsearch.common.inject.spi.ProviderLookup; +import org.elasticsearch.common.inject.spi.ScopeBinding; +import org.elasticsearch.common.inject.spi.StaticInjectionRequest; +import org.elasticsearch.common.inject.spi.TypeConverterBinding; +import org.elasticsearch.common.inject.spi.TypeListenerBinding; import java.util.Iterator; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/common/inject/ConstantFactory.java b/core/src/main/java/org/elasticsearch/common/inject/ConstantFactory.java index d2fb6ae4121..87bf31e911e 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/ConstantFactory.java +++ b/core/src/main/java/org/elasticsearch/common/inject/ConstantFactory.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.ToStringBuilder; import org.elasticsearch.common.inject.spi.Dependency; /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java b/core/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java index b57f92e7958..49ada56cefa 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java +++ b/core/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.inject.spi.InjectionPoint; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Modifier; /** * Produces construction proxies that invoke the class constructor. 
diff --git a/core/src/main/java/org/elasticsearch/common/inject/ExposedKeyFactory.java b/core/src/main/java/org/elasticsearch/common/inject/ExposedKeyFactory.java index 0e001080fbe..efc10b27e49 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/ExposedKeyFactory.java +++ b/core/src/main/java/org/elasticsearch/common/inject/ExposedKeyFactory.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.BindingImpl; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; import org.elasticsearch.common.inject.spi.Dependency; import org.elasticsearch.common.inject.spi.PrivateElements; diff --git a/core/src/main/java/org/elasticsearch/common/inject/FactoryProxy.java b/core/src/main/java/org/elasticsearch/common/inject/FactoryProxy.java index b275ea67a82..c4686e074d0 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/FactoryProxy.java +++ b/core/src/main/java/org/elasticsearch/common/inject/FactoryProxy.java @@ -17,7 +17,11 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.ToStringBuilder; import org.elasticsearch.common.inject.spi.Dependency; /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/Inject.java b/core/src/main/java/org/elasticsearch/common/inject/Inject.java index ff67b645f2b..a79acab89e5 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/Inject.java +++ b/core/src/main/java/org/elasticsearch/common/inject/Inject.java @@ -20,7 +20,9 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.Target; -import static java.lang.annotation.ElementType.*; +import static java.lang.annotation.ElementType.CONSTRUCTOR; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java b/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java index 8cffd7e63b4..8739d9182d8 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.SourceProvider; import org.elasticsearch.common.inject.spi.Dependency; import java.util.Objects; diff --git a/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java 
b/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java index 81ee9cbbe67..32494cc4cd3 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java +++ b/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java @@ -19,7 +19,15 @@ package org.elasticsearch.common.inject; import org.elasticsearch.common.inject.internal.MoreTypes; import org.elasticsearch.common.inject.util.Types; -import java.lang.reflect.*; +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; +import java.lang.reflect.GenericArrayType; +import java.lang.reflect.Member; +import java.lang.reflect.Method; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.lang.reflect.TypeVariable; +import java.lang.reflect.WildcardType; import java.util.Arrays; import java.util.List; import java.util.Objects; diff --git a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Assisted.java b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Assisted.java index 32b1d60bc14..f2c8d49a8a3 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Assisted.java +++ b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Assisted.java @@ -21,7 +21,9 @@ import org.elasticsearch.common.inject.BindingAnnotation; import java.lang.annotation.Retention; import java.lang.annotation.Target; -import static java.lang.annotation.ElementType.*; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Parameter.java b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Parameter.java index e067cc813bd..0fae9dede5b 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Parameter.java +++ b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/Parameter.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject.assistedinject; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.BindingAnnotation; +import org.elasticsearch.common.inject.ConfigurationException; +import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.Provider; import java.lang.annotation.Annotation; import java.lang.reflect.ParameterizedType; diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/Nullable.java b/core/src/main/java/org/elasticsearch/common/inject/internal/Nullable.java index 4dd499e4328..764e93473dd 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/Nullable.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/Nullable.java @@ -16,7 +16,11 @@ package org.elasticsearch.common.inject.internal; -import java.lang.annotation.*; +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * The presence of this annotation on a method parameter indicates that diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java index 38456a4d04b..4b3660fe47a 100644 --- 
a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java @@ -16,17 +16,41 @@ package org.elasticsearch.common.inject.spi; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Binder; +import org.elasticsearch.common.inject.Binding; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.MembersInjector; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.PrivateBinder; +import org.elasticsearch.common.inject.PrivateModule; +import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.inject.Scope; +import org.elasticsearch.common.inject.Stage; +import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.binder.AnnotatedBindingBuilder; import org.elasticsearch.common.inject.binder.AnnotatedConstantBindingBuilder; import org.elasticsearch.common.inject.binder.AnnotatedElementBuilder; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.AbstractBindingBuilder; +import org.elasticsearch.common.inject.internal.BindingBuilder; +import org.elasticsearch.common.inject.internal.ConstantBindingBuilderImpl; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ExposureBuilder; +import org.elasticsearch.common.inject.internal.PrivateElementsImpl; +import org.elasticsearch.common.inject.internal.ProviderMethodsModule; +import org.elasticsearch.common.inject.internal.SourceProvider; import org.elasticsearch.common.inject.matcher.Matcher; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import java.lang.annotation.Annotation; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; /** * Exposes elements of a module so they can be inspected, validated or {@link diff --git a/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java b/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java index e53e7a73eb7..08761f84ff5 100644 --- a/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java +++ b/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.io; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.logging.ESLogger; import java.io.BufferedReader; @@ -30,7 +29,14 @@ import java.io.Reader; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; -import java.nio.file.*; +import java.nio.file.DirectoryNotEmptyException; +import java.nio.file.DirectoryStream; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; @@ -273,7 +279,7 @@ public final class FileSystemUtils { Files.walkFileTree(source, new TreeCopier(source, destination, true)); } } - + // TODO: note that this will fail if source and target are on different NIO.2 
filesystems. static class TreeCopier extends SimpleFileVisitor<Path> { diff --git a/core/src/main/java/org/elasticsearch/common/io/Streams.java b/core/src/main/java/org/elasticsearch/common/io/Streams.java index 36b1d9445b0..f922fde3e75 100644 --- a/core/src/main/java/org/elasticsearch/common/io/Streams.java +++ b/core/src/main/java/org/elasticsearch/common/io/Streams.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.io; -import java.nio.charset.StandardCharsets; - import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.util.Callback; @@ -32,6 +30,7 @@ import java.io.OutputStream; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Objects; diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 20859e2716a..ab6dd542845 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -33,7 +33,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.text.StringAndBytesText; +import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; @@ -46,13 +46,21 @@ import java.io.FileNotFoundException; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.file.AccessDeniedException; +import java.nio.file.AtomicMoveNotSupportedException; +import java.nio.file.DirectoryNotEmptyException; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.FileSystemException; +import java.nio.file.FileSystemLoopException; import java.nio.file.NoSuchFileException; +import java.nio.file.NotDirectoryException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.ElasticsearchException.readException; @@ -256,13 +264,13 @@ public abstract class StreamInput extends InputStream { if (length == -1) { return null; } - return new StringAndBytesText(readBytesReference(length)); + return new Text(readBytesReference(length)); } public Text readText() throws IOException { // use StringAndBytes so we can cache the string if its ever converted to it int length = readInt(); - return new StringAndBytesText(readBytesReference(length)); + return new Text(readBytesReference(length)); } @Nullable @@ -592,11 +600,41 @@ public abstract class StreamInput extends InputStream { case 13: return (T) readStackTrace(new FileNotFoundException(readOptionalString()), this); case 14: + final int subclass = readVInt(); final String file = readOptionalString(); final String other = readOptionalString(); final String reason = readOptionalString(); readOptionalString(); // skip the msg - it's composed from file, other and reason - return (T) readStackTrace(new NoSuchFileException(file, other, reason), this); + final Throwable throwable; + switch (subclass) { + case 0: + throwable = new
NoSuchFileException(file, other, reason); + break; + case 1: + throwable = new NotDirectoryException(file); + break; + case 2: + throwable = new DirectoryNotEmptyException(file); + break; + case 3: + throwable = new AtomicMoveNotSupportedException(file, other, reason); + break; + case 4: + throwable = new FileAlreadyExistsException(file, other, reason); + break; + case 5: + throwable = new AccessDeniedException(file, other, reason); + break; + case 6: + throwable = new FileSystemLoopException(file); + break; + case 7: + throwable = new FileSystemException(file, other, reason); + break; + default: + throw new IllegalStateException("unknown FileSystemException with index " + subclass); + } + return (T) readStackTrace(throwable, this); case 15: return (T) readStackTrace(new OutOfMemoryError(readOptionalString()), this); case 16: @@ -605,6 +643,8 @@ return (T) readStackTrace(new LockObtainFailedException(readOptionalString(), readThrowable()), this); case 18: return (T) readStackTrace(new InterruptedException(readOptionalString()), this); + case 19: + return (T) readStackTrace(new IOException(readOptionalString(), readThrowable()), this); default: assert false : "no such exception for id: " + key; } @@ -629,6 +669,13 @@ return readNamedWriteable(QueryBuilder.class); } + /** + * Reads a {@link ShapeBuilder} from the current stream + */ + public ShapeBuilder readShape() throws IOException { + return readNamedWriteable(ShapeBuilder.class); + } + /** * Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream */ @@ -636,6 +683,18 @@ return readNamedWriteable(ScoreFunctionBuilder.class); } + /** + * Reads a list of objects + */ + public <T> List<T> readList(StreamInputReader<T> reader) throws IOException { + int count = readVInt(); + List<T> builder = new ArrayList<>(count); + for (int i=0; i<count; i++) { + builder.add(reader.read(this)); + } + return builder; + } } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInputReader.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInputReader.java new file mode 100644 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInputReader.java +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.io.stream; + +import java.io.IOException; + +/** + * Defines a method for reading an object from a {@link StreamInput} + */ +public interface StreamInputReader<T> { + T read(StreamInput t) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 5f1e7623d28..b423841acd0 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -32,6 +32,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; @@ -41,7 +42,15 @@ import java.io.EOFException; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; +import java.nio.channels.ClosedChannelException; +import java.nio.file.AccessDeniedException; +import java.nio.file.AtomicMoveNotSupportedException; +import java.nio.file.DirectoryNotEmptyException; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.FileSystemException; +import java.nio.file.FileSystemLoopException; import java.nio.file.NoSuchFileException; +import java.nio.file.NotDirectoryException; import java.util.Date; import java.util.LinkedHashMap; import java.util.List;
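The readList helper above and the writeList helper below form a symmetric pair: a vint length prefix followed by one element per slot. A hedged usage sketch; MyShape is a hypothetical type implementing Writeable<MyShape> with a PROTOTYPE constant, and StreamInputReader's single read method is assumed usable as a method-reference target:

---------------------------------------------------------------------------
// Sketch: round-tripping a vint-prefixed list of Writeable objects.
void roundTrip(StreamOutput out, StreamInput in, List<MyShape> shapes) throws IOException {
    out.writeList(shapes);   // vint size, then each element's writeTo
    List<MyShape> read = in.readList(MyShape.PROTOTYPE::readFrom);
    assert shapes.equals(read);
}
---------------------------------------------------------------------------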
@@ -564,11 +573,28 @@ public abstract class StreamOutput extends OutputStream { } else if (throwable instanceof FileNotFoundException) { writeVInt(13); writeCause = false; - } else if (throwable instanceof NoSuchFileException) { + } else if (throwable instanceof FileSystemException) { writeVInt(14); - writeOptionalString(((NoSuchFileException) throwable).getFile()); - writeOptionalString(((NoSuchFileException) throwable).getOtherFile()); - writeOptionalString(((NoSuchFileException) throwable).getReason()); + if (throwable instanceof NoSuchFileException) { + writeVInt(0); + } else if (throwable instanceof NotDirectoryException) { + writeVInt(1); + } else if (throwable instanceof DirectoryNotEmptyException) { + writeVInt(2); + } else if (throwable instanceof AtomicMoveNotSupportedException) { + writeVInt(3); + } else if (throwable instanceof FileAlreadyExistsException) { + writeVInt(4); + } else if (throwable instanceof AccessDeniedException) { + writeVInt(5); + } else if (throwable instanceof FileSystemLoopException) { + writeVInt(6); + } else { + writeVInt(7); + } + writeOptionalString(((FileSystemException) throwable).getFile()); + writeOptionalString(((FileSystemException) throwable).getOtherFile()); + writeOptionalString(((FileSystemException) throwable).getReason()); writeCause = false; } else if (throwable instanceof OutOfMemoryError) { writeVInt(15); @@ -580,6 +606,8 @@ public abstract class StreamOutput extends OutputStream { } else if (throwable instanceof InterruptedException) { writeVInt(18); writeCause = false; + } else if (throwable instanceof IOException) { + writeVInt(19); } else { ElasticsearchException ex; if (throwable instanceof ElasticsearchException && ElasticsearchException.isRegistered(throwable.getClass())) { @@ -618,6 +646,13 @@ public abstract class StreamOutput extends OutputStream { writeNamedWriteable(queryBuilder); } + /** + * Writes a {@link ShapeBuilder} to the current stream + */ + public void writeShape(ShapeBuilder shapeBuilder) throws IOException { + writeNamedWriteable(shapeBuilder); + } + /** * Writes a {@link ScoreFunctionBuilder} to the current stream */ @@ -632,4 +667,14 @@ public abstract class StreamOutput extends OutputStream { writeDouble(geoPoint.lat()); writeDouble(geoPoint.lon()); } + + /** + * Writes a list of {@link Writeable} objects + */ + public <T extends Writeable<T>> void writeList(List<T> list) throws IOException { + writeVInt(list.size()); + for (T obj: list) { + obj.writeTo(this); + } + } } diff --git a/core/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java b/core/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java index d29687e2dfa..428828bc0fe 100644 --- a/core/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java +++ b/core/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.joda; -import java.util.Locale; - import org.joda.time.format.DateTimeFormatter; +import java.util.Locale; + /** * A simple wrapper around {@link DateTimeFormatter} that retains the * format that was used to create it. @@ -34,7 +34,7 @@ public class FormatDateTimeFormatter { private final DateTimeFormatter parser; private final DateTimeFormatter printer; - + private final Locale locale; public FormatDateTimeFormatter(String format, DateTimeFormatter parser, Locale locale) { @@ -47,7 +47,7 @@ public class FormatDateTimeFormatter { this.printer = locale == null ? printer.withDefaultYear(1970) : printer.withLocale(locale).withDefaultYear(1970); this.parser = locale == null ?
parser.withDefaultYear(1970) : parser.withLocale(locale).withDefaultYear(1970); } - + public String format() { return format; } @@ -59,7 +59,7 @@ public class FormatDateTimeFormatter { public DateTimeFormatter printer() { return this.printer; } - + public Locale locale() { return locale; } diff --git a/core/src/main/java/org/elasticsearch/common/joda/Joda.java b/core/src/main/java/org/elasticsearch/common/joda/Joda.java index 174fe22e15b..34ca5f77da5 100644 --- a/core/src/main/java/org/elasticsearch/common/joda/Joda.java +++ b/core/src/main/java/org/elasticsearch/common/joda/Joda.java @@ -20,11 +20,25 @@ package org.elasticsearch.common.joda; import org.elasticsearch.common.Strings; -import org.joda.time.*; +import org.joda.time.Chronology; +import org.joda.time.DateTime; +import org.joda.time.DateTimeField; +import org.joda.time.DateTimeFieldType; +import org.joda.time.DateTimeZone; +import org.joda.time.DurationField; +import org.joda.time.DurationFieldType; +import org.joda.time.ReadablePartial; import org.joda.time.field.DividedDateTimeField; import org.joda.time.field.OffsetDateTimeField; import org.joda.time.field.ScaledDurationField; -import org.joda.time.format.*; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.DateTimeFormatterBuilder; +import org.joda.time.format.DateTimeParser; +import org.joda.time.format.DateTimeParserBucket; +import org.joda.time.format.DateTimePrinter; +import org.joda.time.format.ISODateTimeFormat; +import org.joda.time.format.StrictISODateTimeFormat; import java.io.IOException; import java.io.Writer; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 16a9796d8b6..558e92c4fb8 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -24,8 +24,34 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.index.*; -import org.apache.lucene.search.*; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.IndexFormatTooNewException; +import org.apache.lucene.index.IndexFormatTooOldException; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.SimpleCollector; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.TimeLimitingCollector; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopFieldDocs; +import org.apache.lucene.search.TwoPhaseIterator; +import 
org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -49,7 +75,12 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import java.io.IOException; import java.text.ParseException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; /** * @@ -253,7 +284,8 @@ public class Lucene { continue; } final Bits liveDocs = context.reader().getLiveDocs(); - for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) { + final DocIdSetIterator iterator = scorer.iterator(); + for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) { if (liveDocs == null || liveDocs.get(doc)) { return true; } @@ -593,7 +625,7 @@ public class Lucene { /** * Returns true iff the given exception or - * one of it's causes is an instance of {@link CorruptIndexException}, + * one of it's causes is an instance of {@link CorruptIndexException}, * {@link IndexFormatTooOldException}, or {@link IndexFormatTooNewException} otherwise false. */ public static boolean isCorruptionException(Throwable t) { @@ -636,19 +668,11 @@ public class Lucene { throw new IllegalStateException(message); } @Override - public int advance(int arg0) throws IOException { - throw new IllegalStateException(message); - } - @Override - public long cost() { - throw new IllegalStateException(message); - } - @Override public int docID() { throw new IllegalStateException(message); } @Override - public int nextDoc() throws IOException { + public DocIdSetIterator iterator() { throw new IllegalStateException(message); } }; @@ -726,10 +750,10 @@ public class Lucene { if (scorer == null) { return new Bits.MatchNoBits(maxDoc); } - final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator(); + final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); final DocIdSetIterator iterator; if (twoPhase == null) { - iterator = scorer; + iterator = scorer.iterator(); } else { iterator = twoPhase.approximation(); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java b/core/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java index e9c58a78a58..e1fc0171bb7 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java @@ -20,7 +20,11 @@ package org.elasticsearch.common.lucene; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.*; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.ScoreCachingWrappingScorer; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.SimpleCollector; import java.io.IOException; @@ -65,4 +69,4 @@ public class MinimumScoreCollector extends SimpleCollector { public boolean needsScores() { return true; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java index c405de129a5..b1271e7338d 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java @@ -23,7 +23,13 @@ import 
org.apache.lucene.index.LeafReader; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; -import java.util.*; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.Map; +import java.util.Set; /** * A map between segment core cache keys and the shard that these segments diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java index 7191c96e33e..c3ea39a06a2 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java @@ -29,6 +29,7 @@ import org.apache.lucene.index.TermState; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; @@ -120,7 +121,7 @@ public final class AllTermQuery extends Query { public Explanation explain(LeafReaderContext context, int doc) throws IOException { AllTermScorer scorer = scorer(context); if (scorer != null) { - int newDoc = scorer.advance(doc); + int newDoc = scorer.iterator().advance(doc); if (newDoc == doc) { float score = scorer.score(); float freq = scorer.freq(); @@ -149,6 +150,10 @@ public final class AllTermQuery extends Query { return null; } final TermState state = termStates.get(context.ord); + if (state == null) { + // Term does not exist in this segment + return null; + } termsEnum.seekExact(term.bytes(), state); PostingsEnum docs = termsEnum.postings(null, PostingsEnum.PAYLOADS); assert docs != null; @@ -209,18 +214,8 @@ public final class AllTermQuery extends Query { } @Override - public int nextDoc() throws IOException { - return postings.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - return postings.advance(target); - } - - @Override - public long cost() { - return postings.cost(); + public DocIdSetIterator iterator() { + return postings; } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java index b394b50683e..c4543435917 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java @@ -18,7 +18,10 @@ */ package org.elasticsearch.common.lucene.index; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilterDirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.index.shard.ShardId; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java index aff0fa69f09..1094a7b1840 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java @@ -18,7 +18,8 @@ */ package 
org.elasticsearch.common.lucene.index; -import org.apache.lucene.index.*; +import org.apache.lucene.index.FilterLeafReader; +import org.apache.lucene.index.LeafReader; import org.elasticsearch.index.shard.ShardId; /** diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 47ed0dbe3f4..0aab078d4ef 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FilteredDocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; @@ -99,11 +100,12 @@ public class FilterableTermsEnum extends TermsEnum { } BitSet bits = null; if (weight != null) { - DocIdSetIterator docs = weight.scorer(context); - if (docs == null) { + Scorer scorer = weight.scorer(context); + if (scorer == null) { // fully filtered, none matching, no need to iterate on this continue; } + DocIdSetIterator docs = scorer.iterator(); // we want to force apply deleted docs final Bits liveDocs = context.reader().getLiveDocs(); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java deleted file mode 100644 index 836c7fdecce..00000000000 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/EmptyScorer.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
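EmptyScorer, deleted below, existed only to hand callers a scorer with empty iteration; after this Lucene upgrade a Scorer is no longer itself a DocIdSetIterator, so callers ask for the iterator explicitly and an always-empty iterator ships with Lucene. The replacement idiom, as used in the Lucene.java and FilterableTermsEnum hunks above:

---------------------------------------------------------------------------
// New iteration idiom: fetch the iterator instead of calling
// nextDoc()/advance() on the scorer itself.
Scorer scorer = weight.scorer(leafReaderContext);
if (scorer != null) {
    DocIdSetIterator it = scorer.iterator();
    for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
        // consume matching doc
    }
}
// No custom empty Scorer subclass is needed any more:
DocIdSetIterator empty = DocIdSetIterator.empty();
---------------------------------------------------------------------------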
- */ -package org.elasticsearch.common.lucene.search; - -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BytesRef; - -import java.io.IOException; - -/** - * - */ -public class EmptyScorer extends Scorer { - - private int docId = -1; - - public EmptyScorer(Weight weight) { - super(weight); - } - - @Override - public float score() throws IOException { - throw new UnsupportedOperationException("Should never be called"); - } - - @Override - public int freq() throws IOException { - throw new UnsupportedOperationException("Should never be called"); - } - - @Override - public int docID() { - return docId; - } - - @Override - public int nextDoc() throws IOException { - assert docId != NO_MORE_DOCS; - return docId = NO_MORE_DOCS; - } - - @Override - public int advance(int target) throws IOException { - return slowAdvance(target); - } - - @Override - public long cost() { - return 0; - } -} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java index 8b1dcd9dfcf..fbe0c28e341 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java @@ -22,11 +22,15 @@ package org.elasticsearch.common.lucene.search; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.lucene.util.BytesRef; @@ -35,7 +39,11 @@ import org.elasticsearch.common.io.FastStringReader; import java.io.IOException; import java.io.Reader; -import java.util.*; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; /** * @@ -130,7 +138,7 @@ public class MoreLikeThisQuery extends Query { if (rewritten != this) { return rewritten; } - XMoreLikeThis mlt = new XMoreLikeThis(reader, similarity == null ? new DefaultSimilarity() : similarity); + XMoreLikeThis mlt = new XMoreLikeThis(reader, similarity == null ? 
new ClassicSimilarity() : similarity); mlt.setFieldNames(moreLikeFields); mlt.setAnalyzer(analyzer); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index 662c3294151..d1efdc3ede2 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -20,7 +20,11 @@ package org.elasticsearch.common.lucene.search; import com.carrotsearch.hppc.ObjectHashSet; -import org.apache.lucene.index.*; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.Query; @@ -29,7 +33,11 @@ import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.ToStringUtils; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; public class MultiPhrasePrefixQuery extends Query { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index b7f534d2124..73c3fc9400d 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -20,8 +20,14 @@ package org.elasticsearch.common.lucene.search; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; @@ -173,8 +179,6 @@ public class Queries { result = calc < 0 ? result + calc : calc; } - return (optionalClauseCount < result ? - optionalClauseCount : (result < 0 ? 0 : result)); - + return result < 0 ? 
0 : result; } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index 53159660089..16378523b5c 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -38,9 +38,21 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.document.Document; -import org.apache.lucene.index.*; -import org.apache.lucene.search.*; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.MultiFields; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRefBuilder; @@ -50,7 +62,12 @@ import org.elasticsearch.common.io.FastStringReader; import java.io.IOException; import java.io.Reader; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; /** @@ -287,7 +304,7 @@ public final class XMoreLikeThis { /** * For idf() calculations. */ - private TFIDFSimilarity similarity;// = new DefaultSimilarity(); + private TFIDFSimilarity similarity;// = new ClassicSimilarity(); /** * IndexReader to use @@ -329,7 +346,7 @@ public final class XMoreLikeThis { * Constructor requiring an IndexReader. */ public XMoreLikeThis(IndexReader ir) { - this(ir, new DefaultSimilarity()); + this(ir, new ClassicSimilarity()); } public XMoreLikeThis(IndexReader ir, TFIDFSimilarity sim) { @@ -845,7 +862,7 @@ public final class XMoreLikeThis { while(docs != null && docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { freq += docs.freq(); } - + // increment frequency Int cnt = termFreqMap.get(term); if (cnt == null) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java deleted file mode 100644 index 709c7df7898..00000000000 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
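CustomBoostFactorScorer, whose deletion starts here, hard-wired min-score filtering into every function-score scorer through its MinScoreNextDoc/AnyNextDoc strategies. The FiltersFunctionScoreQuery hunks further below replace that with composition: the factor scorer now extends Lucene's FilterScorer, and when minScore is set the result is wrapped in a separate MinScoreScorer. Conceptually the wrapper enforces this contract (a sketch of the idea, not the MinScoreScorer source, which is outside this excerpt):

---------------------------------------------------------------------------
// Sketch: min-score filtering as a wrapper that advances the underlying
// scorer until a document scores at or above the threshold.
static int nextAtLeast(Scorer scorer, float minScore) throws IOException {
    DocIdSetIterator it = scorer.iterator();
    int doc = it.nextDoc();
    while (doc != DocIdSetIterator.NO_MORE_DOCS && scorer.score() < minScore) {
        doc = it.nextDoc();
    }
    return doc;
}
---------------------------------------------------------------------------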
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.lucene.search.function; - -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; - -import java.io.IOException; - -abstract class CustomBoostFactorScorer extends Scorer { - - final Scorer scorer; - final float maxBoost; - final CombineFunction scoreCombiner; - - Float minScore; - NextDoc nextDoc; - - CustomBoostFactorScorer(Weight w, Scorer scorer, float maxBoost, CombineFunction scoreCombiner, Float minScore) - throws IOException { - super(w); - if (minScore == null) { - nextDoc = new AnyNextDoc(); - } else { - nextDoc = new MinScoreNextDoc(); - } - this.scorer = scorer; - this.maxBoost = maxBoost; - this.scoreCombiner = scoreCombiner; - this.minScore = minScore; - } - - @Override - public int docID() { - return scorer.docID(); - } - - @Override - public int advance(int target) throws IOException { - return nextDoc.advance(target); - } - - @Override - public int nextDoc() throws IOException { - return nextDoc.nextDoc(); - } - - public abstract float innerScore() throws IOException; - - @Override - public float score() throws IOException { - return nextDoc.score(); - } - - @Override - public int freq() throws IOException { - return scorer.freq(); - } - - @Override - public long cost() { - return scorer.cost(); - } - - public interface NextDoc { - public int advance(int target) throws IOException; - - public int nextDoc() throws IOException; - - public float score() throws IOException; - } - - public class MinScoreNextDoc implements NextDoc { - float currentScore = Float.MAX_VALUE * -1.0f; - - @Override - public int nextDoc() throws IOException { - int doc; - do { - doc = scorer.nextDoc(); - if (doc == NO_MORE_DOCS) { - return doc; - } - currentScore = innerScore(); - } while (currentScore < minScore); - return doc; - } - - @Override - public float score() throws IOException { - return currentScore; - } - - @Override - public int advance(int target) throws IOException { - int doc = scorer.advance(target); - if (doc == NO_MORE_DOCS) { - return doc; - } - currentScore = innerScore(); - if (currentScore < minScore) { - return scorer.nextDoc(); - } - return doc; - } - } - - public class AnyNextDoc implements NextDoc { - - @Override - public int nextDoc() throws IOException { - return scorer.nextDoc(); - } - - @Override - public float score() throws IOException { - return innerScore(); - } - - @Override - public int advance(int target) throws IOException { - return scorer.advance(target); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index 3da5ae0e4ab..a7b7300c9b6 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import 
org.apache.lucene.search.Explanation; +import org.apache.lucene.search.FilterScorer; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; @@ -35,7 +36,12 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.Set; /** * A query that allows for a pluggable boost function / filter. If it matches @@ -137,7 +143,7 @@ public class FiltersFunctionScoreQuery extends Query { @Override public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - if (needsScores == false) { + if (needsScores == false && minScore == null) { return subQuery.createWeight(searcher, needsScores); } @@ -179,11 +185,7 @@ public class FiltersFunctionScoreQuery extends Query { subQueryWeight.normalize(norm, boost); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - // we ignore scoreDocsInOrder parameter, because we need to score in - // order if documents are scored with a script. The - // ShardLookup depends on in order scoring. + private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); if (subQueryScorer == null) { return null; @@ -196,15 +198,24 @@ public class FiltersFunctionScoreQuery extends Query { Scorer filterScorer = filterWeights[i].scorer(context); docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer); } - return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, minScore, needsScores); + return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, needsScores); + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + Scorer scorer = functionScorer(context); + if (scorer != null && minScore != null) { + scorer = new MinScoreScorer(this, scorer, minScore); + } + return scorer; } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - Explanation subQueryExpl = subQueryWeight.explain(context, doc); - if (!subQueryExpl.isMatch()) { - return subQueryExpl; + Explanation expl = subQueryWeight.explain(context, doc); + if (!expl.isMatch()) { + return expl; } // First: Gather explanations for all filters List filterExplanations = new ArrayList<>(); @@ -213,7 +224,7 @@ public class FiltersFunctionScoreQuery extends Query { filterWeights[i].scorer(context)); if (docSet.get(doc)) { FilterFunction filterFunction = filterFunctions[i]; - Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl); + Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, expl); double factor = functionExplanation.getValue(); float sc = CombineFunction.toFloat(factor); Explanation filterExplanation = Explanation.match(sc, "function score, product of:", @@ -221,46 +232,52 @@ public class FiltersFunctionScoreQuery extends Query { filterExplanations.add(filterExplanation); } } - if (filterExplanations.size() == 0) { - return subQueryExpl; + if (filterExplanations.size() > 0) { + FiltersFunctionFactorScorer 
scorer = functionScorer(context); + int actualDoc = scorer.iterator().advance(doc); + assert (actualDoc == doc); + double score = scorer.computeScore(doc, expl.getValue()); + Explanation factorExplanation = Explanation.match( + CombineFunction.toFloat(score), + "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]", + filterExplanations); + expl = combineFunction.explain(expl, factorExplanation, maxBoost); } - - FiltersFunctionFactorScorer scorer = (FiltersFunctionFactorScorer)scorer(context); - int actualDoc = scorer.advance(doc); - assert (actualDoc == doc); - double score = scorer.computeScore(doc, subQueryExpl.getValue()); - Explanation factorExplanation = Explanation.match( - CombineFunction.toFloat(score), - "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]", - filterExplanations); - return combineFunction.explain(subQueryExpl, factorExplanation, maxBoost); + if (minScore != null && minScore > expl.getValue()) { + expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl); + } + return expl; } } - static class FiltersFunctionFactorScorer extends CustomBoostFactorScorer { + static class FiltersFunctionFactorScorer extends FilterScorer { private final FilterFunction[] filterFunctions; private final ScoreMode scoreMode; private final LeafScoreFunction[] functions; private final Bits[] docSets; + private final CombineFunction scoreCombiner; + private final float maxBoost; private final boolean needsScores; private FiltersFunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, FilterFunction[] filterFunctions, - float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner, Float minScore, boolean needsScores) throws IOException { - super(w, scorer, maxBoost, scoreCombiner, minScore); + float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner, boolean needsScores) throws IOException { + super(scorer, w); this.scoreMode = scoreMode; this.filterFunctions = filterFunctions; this.functions = functions; this.docSets = docSets; + this.scoreCombiner = scoreCombiner; + this.maxBoost = maxBoost; this.needsScores = needsScores; } @Override - public float innerScore() throws IOException { - int docId = scorer.docID(); + public float score() throws IOException { + int docId = docID(); // Even if the weight is created with needsScores=false, it might // be costly to call score(), so we explicitly check if scores // are needed - float subQueryScore = needsScores ? scorer.score() : 0f; + float subQueryScore = needsScores ? 
super.score() : 0f; double factor = computeScore(docId, subQueryScore); return scoreCombiner.combine(subQueryScore, factor, maxBoost); } @@ -352,12 +369,13 @@ public class FiltersFunctionScoreQuery extends Query { } FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o; return Objects.equals(this.subQuery, other.subQuery) && this.maxBoost == other.maxBoost && - Objects.equals(this.combineFunction, other.combineFunction) && Objects.equals(this.minScore, other.minScore) && - Arrays.equals(this.filterFunctions, other.filterFunctions); + Objects.equals(this.combineFunction, other.combineFunction) && Objects.equals(this.minScore, other.minScore) && + Objects.equals(this.scoreMode, other.scoreMode) && + Arrays.equals(this.filterFunctions, other.filterFunctions); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), subQuery, maxBoost, combineFunction, minScore, filterFunctions); + return Objects.hash(super.hashCode(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(filterFunctions)); } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index 972fb794fb5..3cf4f3e48f7 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -19,10 +19,15 @@ package org.elasticsearch.common.lucene.search.function; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.FilterScorer; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; import org.apache.lucene.util.ToStringUtils; import java.io.IOException; @@ -86,7 +91,7 @@ public class FunctionScoreQuery extends Query { @Override public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - if (needsScores == false) { + if (needsScores == false && minScore == null) { return subQuery.createWeight(searcher, needsScores); } @@ -124,8 +129,7 @@ public class FunctionScoreQuery extends Query { subQueryWeight.normalize(norm, boost); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); if (subQueryScorer == null) { return null; @@ -134,7 +138,16 @@ public class FunctionScoreQuery extends Query { if (function != null) { leafFunction = function.getLeafScoreFunction(context); } - return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, minScore, needsScores); + return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, needsScores); + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + Scorer scorer = functionScorer(context); + if (scorer != null && minScore != null) { + scorer = new MinScoreScorer(this, scorer, minScore); + } + return scorer; } @Override @@ -143,38 +156,47 @@ public class FunctionScoreQuery extends Query { if (!subQueryExpl.isMatch()) { 
return subQueryExpl; } + Explanation expl; if (function != null) { Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl); - return combineFunction.explain(subQueryExpl, functionExplanation, maxBoost); + expl = combineFunction.explain(subQueryExpl, functionExplanation, maxBoost); } else { - return subQueryExpl; + expl = subQueryExpl; } + if (minScore != null && minScore > expl.getValue()) { + expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl); + } + return expl; } } - static class FunctionFactorScorer extends CustomBoostFactorScorer { + static class FunctionFactorScorer extends FilterScorer { private final LeafScoreFunction function; private final boolean needsScores; + private final CombineFunction scoreCombiner; + private final float maxBoost; - private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost, CombineFunction scoreCombiner, Float minScore, boolean needsScores) + private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost, CombineFunction scoreCombiner, boolean needsScores) throws IOException { - super(w, scorer, maxBoost, scoreCombiner, minScore); + super(scorer, w); this.function = function; + this.scoreCombiner = scoreCombiner; + this.maxBoost = maxBoost; this.needsScores = needsScores; } @Override - public float innerScore() throws IOException { + public float score() throws IOException { // Even if the weight is created with needsScores=false, it might // be costly to call score(), so we explicitly check if scores // are needed - float score = needsScores ? scorer.score() : 0f; + float score = needsScores ? super.score() : 0f; if (function == null) { return score; } else { return scoreCombiner.combine(score, - function.score(scorer.docID(), score), maxBoost); + function.score(docID(), score), maxBoost); } } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java new file mode 100644 index 00000000000..b4b87bda6d8 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
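
The new file being added here, MinScoreScorer, moves the min_score check out of the scorer's own iteration loop and into Lucene's two-phase iteration, so the potentially expensive function score is only computed to confirm candidate matches. For orientation, this is roughly how a consumer drives any scorer that may expose a TwoPhaseIterator; the drain helper and the println are illustrative assumptions, not part of this change:

    import java.io.IOException;

    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.TwoPhaseIterator;

    class TwoPhaseSketch {
        // Iterate a scorer, honoring its optional two-phase iterator.
        static void drain(Scorer scorer) throws IOException {
            TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
            DocIdSetIterator disi = twoPhase == null ? scorer.iterator() : twoPhase.approximation();
            for (int doc = disi.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = disi.nextDoc()) {
                if (twoPhase != null && twoPhase.matches() == false) {
                    continue; // candidate rejected, e.g. by the min-score check in matches()
                }
                // score() is only legal once matches() has confirmed the doc
                System.out.println(doc + " scored " + scorer.score());
            }
        }
    }

This also explains the ScoreCachingWrappingScorer wrap in the constructor below: matches() has to call score() to verify the threshold, and a collector will typically ask for the score again, so caching avoids computing the function score twice per document.
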
+ */ + +package org.elasticsearch.common.lucene.search.function; + +import java.io.IOException; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.ScoreCachingWrappingScorer; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.search.Weight; + +/** A {@link Scorer} that filters out documents that have a score that is + * lower than a configured constant. */ +final class MinScoreScorer extends Scorer { + + private final Scorer in; + private final float minScore; + + MinScoreScorer(Weight weight, Scorer scorer, float minScore) { + super(weight); + if (scorer instanceof ScoreCachingWrappingScorer == false) { + // when minScore is set, scores might be requested twice: once + // to verify the match, and once by the collector + scorer = new ScoreCachingWrappingScorer(scorer); + } + this.in = scorer; + this.minScore = minScore; + } + + public Scorer getScorer() { + return in; + } + + @Override + public int docID() { + return in.docID(); + } + + @Override + public float score() throws IOException { + return in.score(); + } + + @Override + public int freq() throws IOException { + return in.freq(); + } + + @Override + public DocIdSetIterator iterator() { + return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator()); + } + + @Override + public TwoPhaseIterator twoPhaseIterator() { + final TwoPhaseIterator inTwoPhase = this.in.twoPhaseIterator(); + final DocIdSetIterator approximation = inTwoPhase == null ? in.iterator() : inTwoPhase.approximation(); + return new TwoPhaseIterator(approximation) { + + @Override + public boolean matches() throws IOException { + // we need to check the two-phase iterator first + // otherwise calling score() is illegal + if (inTwoPhase != null && inTwoPhase.matches() == false) { + return false; + } + return in.score() >= minScore; + } + + @Override + public float matchCost() { + return 1000f // random constant for the score computation + + (inTwoPhase == null ? 
0 : inTwoPhase.matchCost()); + } + }; + } +} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 9013b4b60e0..f027b7c58cb 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Scorer; import org.elasticsearch.script.ExplainableSearchScript; @@ -57,19 +58,9 @@ public class ScriptScoreFunction extends ScoreFunction { } @Override - public int nextDoc() throws IOException { + public DocIdSetIterator iterator() { throw new UnsupportedOperationException(); } - - @Override - public int advance(int target) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - public long cost() { - return 1; - } } private final Script sScript; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDAndVersionLookup.java b/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDAndVersionLookup.java index 0cf8a520094..60b8e2086de 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDAndVersionLookup.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDAndVersionLookup.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.lucene.uid; * under the License. */ -import java.io.IOException; - import org.apache.lucene.index.Fields; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -36,6 +34,8 @@ import org.elasticsearch.common.lucene.uid.Versions.DocIdAndVersion; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.internal.VersionFieldMapper; +import java.io.IOException; + /** Utility class to do efficient primary-key (only 1 doc contains the * given term) lookups by segment, re-using the enums. This class is diff --git a/core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java b/core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java index a7993384267..a0cf923c5f1 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.lucene.uid; -import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader.CoreClosedListener; diff --git a/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java b/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java index eefaaf22e7a..b2fca5f6605 100644 --- a/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java +++ b/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java @@ -19,13 +19,12 @@ package org.elasticsearch.common.metrics; -import java.util.concurrent.atomic.LongAdder; - import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.LongAdder; /** * An exponentially-weighted moving average. - * + * *
<p/>
* Taken from codahale metric module, changed to use LongAdder * diff --git a/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java b/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java index 8f7b46c355f..3a24df0208e 100644 --- a/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java +++ b/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java @@ -21,10 +21,10 @@ package org.elasticsearch.common.metrics; import org.elasticsearch.common.util.concurrent.FutureUtils; -import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.LongAdder; /** * A meter metric which measures mean throughput and one-, five-, and diff --git a/core/src/main/java/org/elasticsearch/common/netty/OpenChannelsHandler.java b/core/src/main/java/org/elasticsearch/common/netty/OpenChannelsHandler.java index 8b687b1fcec..324db75dc90 100644 --- a/core/src/main/java/org/elasticsearch/common/netty/OpenChannelsHandler.java +++ b/core/src/main/java/org/elasticsearch/common/netty/OpenChannelsHandler.java @@ -22,7 +22,15 @@ package org.elasticsearch.common.netty; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelEvent; +import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.ChannelFutureListener; +import org.jboss.netty.channel.ChannelHandler; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ChannelState; +import org.jboss.netty.channel.ChannelStateEvent; +import org.jboss.netty.channel.ChannelUpstreamHandler; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index c1f282ac234..12e22a7693b 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -19,21 +19,366 @@ package org.elasticsearch.common.network; +import org.elasticsearch.client.support.Headers; +import org.elasticsearch.client.transport.TransportClientNodesService; +import org.elasticsearch.client.transport.support.TransportProxyClient; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.ExtensionPoint; +import org.elasticsearch.http.HttpServer; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.netty.NettyHttpServerTransport; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthAction; +import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction; +import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction; +import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsAction; +import org.elasticsearch.rest.action.admin.cluster.node.tasks.RestListTasksAction; +import 
org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction; +import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction; +import org.elasticsearch.rest.action.admin.cluster.repositories.put.RestPutRepositoryAction; +import org.elasticsearch.rest.action.admin.cluster.repositories.verify.RestVerifyRepositoryAction; +import org.elasticsearch.rest.action.admin.cluster.reroute.RestClusterRerouteAction; +import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterGetSettingsAction; +import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterUpdateSettingsAction; +import org.elasticsearch.rest.action.admin.cluster.shards.RestClusterSearchShardsAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.create.RestCreateSnapshotAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.delete.RestDeleteSnapshotAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.get.RestGetSnapshotsAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.restore.RestRestoreSnapshotAction; +import org.elasticsearch.rest.action.admin.cluster.snapshots.status.RestSnapshotsStatusAction; +import org.elasticsearch.rest.action.admin.cluster.state.RestClusterStateAction; +import org.elasticsearch.rest.action.admin.cluster.stats.RestClusterStatsAction; +import org.elasticsearch.rest.action.admin.cluster.tasks.RestPendingClusterTasksAction; +import org.elasticsearch.rest.action.admin.indices.alias.RestIndicesAliasesAction; +import org.elasticsearch.rest.action.admin.indices.alias.delete.RestIndexDeleteAliasesAction; +import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetAliasesAction; +import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetIndicesAliasesAction; +import org.elasticsearch.rest.action.admin.indices.alias.head.RestAliasesExistAction; +import org.elasticsearch.rest.action.admin.indices.alias.put.RestIndexPutAliasAction; +import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction; +import org.elasticsearch.rest.action.admin.indices.cache.clear.RestClearIndicesCacheAction; +import org.elasticsearch.rest.action.admin.indices.close.RestCloseIndexAction; +import org.elasticsearch.rest.action.admin.indices.create.RestCreateIndexAction; +import org.elasticsearch.rest.action.admin.indices.delete.RestDeleteIndexAction; +import org.elasticsearch.rest.action.admin.indices.exists.indices.RestIndicesExistsAction; +import org.elasticsearch.rest.action.admin.indices.exists.types.RestTypesExistsAction; +import org.elasticsearch.rest.action.admin.indices.flush.RestFlushAction; +import org.elasticsearch.rest.action.admin.indices.flush.RestSyncedFlushAction; +import org.elasticsearch.rest.action.admin.indices.forcemerge.RestForceMergeAction; +import org.elasticsearch.rest.action.admin.indices.get.RestGetIndicesAction; +import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetFieldMappingAction; +import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetMappingAction; +import org.elasticsearch.rest.action.admin.indices.mapping.put.RestPutMappingAction; +import org.elasticsearch.rest.action.admin.indices.open.RestOpenIndexAction; +import org.elasticsearch.rest.action.admin.indices.recovery.RestRecoveryAction; +import org.elasticsearch.rest.action.admin.indices.refresh.RestRefreshAction; +import org.elasticsearch.rest.action.admin.indices.segments.RestIndicesSegmentsAction; +import 
org.elasticsearch.rest.action.admin.indices.settings.RestGetSettingsAction; +import org.elasticsearch.rest.action.admin.indices.settings.RestUpdateSettingsAction; +import org.elasticsearch.rest.action.admin.indices.shards.RestIndicesShardStoresAction; +import org.elasticsearch.rest.action.admin.indices.stats.RestIndicesStatsAction; +import org.elasticsearch.rest.action.admin.indices.template.delete.RestDeleteIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.template.get.RestGetIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.template.head.RestHeadIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.template.put.RestPutIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.upgrade.RestUpgradeAction; +import org.elasticsearch.rest.action.admin.indices.validate.query.RestValidateQueryAction; +import org.elasticsearch.rest.action.admin.indices.validate.template.RestRenderSearchTemplateAction; +import org.elasticsearch.rest.action.admin.indices.warmer.delete.RestDeleteWarmerAction; +import org.elasticsearch.rest.action.admin.indices.warmer.get.RestGetWarmerAction; +import org.elasticsearch.rest.action.admin.indices.warmer.put.RestPutWarmerAction; +import org.elasticsearch.rest.action.bulk.RestBulkAction; +import org.elasticsearch.rest.action.cat.AbstractCatAction; +import org.elasticsearch.rest.action.cat.RestAliasAction; +import org.elasticsearch.rest.action.cat.RestAllocationAction; +import org.elasticsearch.rest.action.cat.RestCatAction; +import org.elasticsearch.rest.action.cat.RestFielddataAction; +import org.elasticsearch.rest.action.cat.RestHealthAction; +import org.elasticsearch.rest.action.cat.RestIndicesAction; +import org.elasticsearch.rest.action.cat.RestMasterAction; +import org.elasticsearch.rest.action.cat.RestNodeAttrsAction; +import org.elasticsearch.rest.action.cat.RestNodesAction; +import org.elasticsearch.rest.action.cat.RestPluginsAction; +import org.elasticsearch.rest.action.cat.RestRepositoriesAction; +import org.elasticsearch.rest.action.cat.RestSegmentsAction; +import org.elasticsearch.rest.action.cat.RestShardsAction; +import org.elasticsearch.rest.action.cat.RestSnapshotAction; +import org.elasticsearch.rest.action.cat.RestThreadPoolAction; +import org.elasticsearch.rest.action.delete.RestDeleteAction; +import org.elasticsearch.rest.action.explain.RestExplainAction; +import org.elasticsearch.rest.action.fieldstats.RestFieldStatsAction; +import org.elasticsearch.rest.action.get.RestGetAction; +import org.elasticsearch.rest.action.get.RestGetSourceAction; +import org.elasticsearch.rest.action.get.RestHeadAction; +import org.elasticsearch.rest.action.get.RestMultiGetAction; +import org.elasticsearch.rest.action.index.RestIndexAction; +import org.elasticsearch.rest.action.main.RestMainAction; +import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction; +import org.elasticsearch.rest.action.percolate.RestPercolateAction; +import org.elasticsearch.rest.action.script.RestDeleteIndexedScriptAction; +import org.elasticsearch.rest.action.script.RestGetIndexedScriptAction; +import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction; +import org.elasticsearch.rest.action.search.RestClearScrollAction; +import org.elasticsearch.rest.action.search.RestMultiSearchAction; +import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.rest.action.search.RestSearchScrollAction; +import org.elasticsearch.rest.action.suggest.RestSuggestAction; +import 
org.elasticsearch.rest.action.template.RestDeleteSearchTemplateAction; +import org.elasticsearch.rest.action.template.RestGetSearchTemplateAction; +import org.elasticsearch.rest.action.template.RestPutSearchTemplateAction; +import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction; +import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction; +import org.elasticsearch.rest.action.update.RestUpdateAction; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.local.LocalTransport; +import org.elasticsearch.transport.netty.NettyTransport; + +import java.util.Arrays; +import java.util.List; /** - * + * A module to handle registering and binding all network related classes. */ public class NetworkModule extends AbstractModule { - private final NetworkService networkService; + public static final String TRANSPORT_TYPE_KEY = "transport.type"; + public static final String TRANSPORT_SERVICE_TYPE_KEY = "transport.service.type"; - public NetworkModule(NetworkService networkService) { + public static final String LOCAL_TRANSPORT = "local"; + public static final String NETTY_TRANSPORT = "netty"; + + public static final String HTTP_TYPE_KEY = "http.type"; + public static final String HTTP_ENABLED = "http.enabled"; + + private static final List> builtinRestHandlers = Arrays.asList( + RestMainAction.class, + + RestNodesInfoAction.class, + RestNodesStatsAction.class, + RestNodesHotThreadsAction.class, + RestClusterStatsAction.class, + RestClusterStateAction.class, + RestClusterHealthAction.class, + RestClusterUpdateSettingsAction.class, + RestClusterGetSettingsAction.class, + RestClusterRerouteAction.class, + RestClusterSearchShardsAction.class, + RestPendingClusterTasksAction.class, + RestPutRepositoryAction.class, + RestGetRepositoriesAction.class, + RestDeleteRepositoryAction.class, + RestVerifyRepositoryAction.class, + RestGetSnapshotsAction.class, + RestCreateSnapshotAction.class, + RestRestoreSnapshotAction.class, + RestDeleteSnapshotAction.class, + RestSnapshotsStatusAction.class, + + RestIndicesExistsAction.class, + RestTypesExistsAction.class, + RestGetIndicesAction.class, + RestIndicesStatsAction.class, + RestIndicesSegmentsAction.class, + RestIndicesShardStoresAction.class, + RestGetAliasesAction.class, + RestAliasesExistAction.class, + RestIndexDeleteAliasesAction.class, + RestIndexPutAliasAction.class, + RestIndicesAliasesAction.class, + RestGetIndicesAliasesAction.class, + RestCreateIndexAction.class, + RestDeleteIndexAction.class, + RestCloseIndexAction.class, + RestOpenIndexAction.class, + + RestUpdateSettingsAction.class, + RestGetSettingsAction.class, + + RestAnalyzeAction.class, + RestGetIndexTemplateAction.class, + RestPutIndexTemplateAction.class, + RestDeleteIndexTemplateAction.class, + RestHeadIndexTemplateAction.class, + + RestPutWarmerAction.class, + RestDeleteWarmerAction.class, + RestGetWarmerAction.class, + + RestPutMappingAction.class, + RestGetMappingAction.class, + RestGetFieldMappingAction.class, + + RestRefreshAction.class, + RestFlushAction.class, + RestSyncedFlushAction.class, + RestForceMergeAction.class, + RestUpgradeAction.class, + RestClearIndicesCacheAction.class, + + RestIndexAction.class, + RestGetAction.class, + RestGetSourceAction.class, + RestHeadAction.class, + RestMultiGetAction.class, + RestDeleteAction.class, + org.elasticsearch.rest.action.count.RestCountAction.class, + RestSuggestAction.class, + RestTermVectorsAction.class, + 
RestMultiTermVectorsAction.class, + RestBulkAction.class, + RestUpdateAction.class, + RestPercolateAction.class, + RestMultiPercolateAction.class, + + RestSearchAction.class, + RestSearchScrollAction.class, + RestClearScrollAction.class, + RestMultiSearchAction.class, + RestRenderSearchTemplateAction.class, + + RestValidateQueryAction.class, + + RestExplainAction.class, + + RestRecoveryAction.class, + + // Templates API + RestGetSearchTemplateAction.class, + RestPutSearchTemplateAction.class, + RestDeleteSearchTemplateAction.class, + + // Scripts API + RestGetIndexedScriptAction.class, + RestPutIndexedScriptAction.class, + RestDeleteIndexedScriptAction.class, + + RestFieldStatsAction.class, + + // no abstract cat action + RestCatAction.class, + + // Tasks API + RestListTasksAction.class + ); + + private static final List> builtinCatHandlers = Arrays.asList( + RestAllocationAction.class, + RestShardsAction.class, + RestMasterAction.class, + RestNodesAction.class, + RestIndicesAction.class, + RestSegmentsAction.class, + // Fully qualified to prevent interference with rest.action.count.RestCountAction + org.elasticsearch.rest.action.cat.RestCountAction.class, + // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction + org.elasticsearch.rest.action.cat.RestRecoveryAction.class, + RestHealthAction.class, + org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction.class, + RestAliasAction.class, + RestThreadPoolAction.class, + RestPluginsAction.class, + RestFielddataAction.class, + RestNodeAttrsAction.class, + RestRepositoriesAction.class, + RestSnapshotAction.class + ); + + private final NetworkService networkService; + private final Settings settings; + private final boolean transportClient; + + private final ExtensionPoint.SelectedType transportServiceTypes = new ExtensionPoint.SelectedType<>("transport_service", TransportService.class); + private final ExtensionPoint.SelectedType transportTypes = new ExtensionPoint.SelectedType<>("transport", Transport.class); + private final ExtensionPoint.SelectedType httpTransportTypes = new ExtensionPoint.SelectedType<>("http_transport", HttpServerTransport.class); + private final ExtensionPoint.ClassSet restHandlers = new ExtensionPoint.ClassSet<>("rest_handler", RestHandler.class); + // we must separate the cat rest handlers so RestCatAction can collect them... + private final ExtensionPoint.ClassSet catHandlers = new ExtensionPoint.ClassSet<>("cat_handler", AbstractCatAction.class); + + /** + * Creates a network module that custom networking classes can be plugged into. + * + * @param networkService A constructed network service object to bind. + * @param settings The settings for the node + * @param transportClient True if only transport classes should be allowed to be registered, false otherwise. 
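
The registration methods that follow are the extension surface of this module. As a sketch of the intent, a plugin could plausibly hook in a custom transport like this; the onModule hook shape and the MyTransport class are assumptions for illustration:

    import org.elasticsearch.common.network.NetworkModule;

    public class MyNetworkPlugin {
        // Module hook invoked by the plugin infrastructure (assumed wiring).
        public void onModule(NetworkModule module) {
            module.registerTransport("my-transport", MyTransport.class); // MyTransport extends Transport (assumed)
        }
    }

A node would then opt in with transport.type: my-transport in its settings, which is exactly the lookup that bindType performs in configure() below.
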
+ */ + public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient) { this.networkService = networkService; + this.settings = settings; + this.transportClient = transportClient; + registerTransportService(NETTY_TRANSPORT, TransportService.class); + registerTransport(LOCAL_TRANSPORT, LocalTransport.class); + registerTransport(NETTY_TRANSPORT, NettyTransport.class); + + if (transportClient == false) { + registerHttpTransport(NETTY_TRANSPORT, NettyHttpServerTransport.class); + + for (Class catAction : builtinCatHandlers) { + catHandlers.registerExtension(catAction); + } + for (Class restAction : builtinRestHandlers) { + restHandlers.registerExtension(restAction); + } + } + } + + /** Adds a transport service implementation that can be selected by setting {@link #TRANSPORT_SERVICE_TYPE_KEY}. */ + public void registerTransportService(String name, Class clazz) { + transportServiceTypes.registerExtension(name, clazz); + } + + /** Adds a transport implementation that can be selected by setting {@link #TRANSPORT_TYPE_KEY}. */ + public void registerTransport(String name, Class clazz) { + transportTypes.registerExtension(name, clazz); + } + + /** Adds an http transport implementation that can be selected by setting {@link #HTTP_TYPE_KEY}. */ + // TODO: we need another name than "http transport"....so confusing with transportClient... + public void registerHttpTransport(String name, Class clazz) { + if (transportClient) { + throw new IllegalArgumentException("Cannot register http transport " + clazz.getName() + " for transport client"); + } + httpTransportTypes.registerExtension(name, clazz); + } + + /** Adds an additional rest action. */ + // TODO: change this further to eliminate the middle man, ie RestController, and just register method and path here + public void registerRestHandler(Class clazz) { + if (transportClient) { + throw new IllegalArgumentException("Cannot register rest handler " + clazz.getName() + " for transport client"); + } + if (AbstractCatAction.class.isAssignableFrom(clazz)) { + catHandlers.registerExtension(clazz.asSubclass(AbstractCatAction.class)); + } else { + restHandlers.registerExtension(clazz); + } } @Override protected void configure() { bind(NetworkService.class).toInstance(networkService); + bind(NamedWriteableRegistry.class).asEagerSingleton(); + + transportServiceTypes.bindType(binder(), settings, TRANSPORT_SERVICE_TYPE_KEY, NETTY_TRANSPORT); + String defaultTransport = DiscoveryNode.localNode(settings) ? 
LOCAL_TRANSPORT : NETTY_TRANSPORT; + transportTypes.bindType(binder(), settings, TRANSPORT_TYPE_KEY, defaultTransport); + + if (transportClient) { + bind(Headers.class).asEagerSingleton(); + bind(TransportProxyClient.class).asEagerSingleton(); + bind(TransportClientNodesService.class).asEagerSingleton(); + } else { + if (settings.getAsBoolean(HTTP_ENABLED, true)) { + bind(HttpServer.class).asEagerSingleton(); + httpTransportTypes.bindType(binder(), settings, HTTP_TYPE_KEY, NETTY_TRANSPORT); + } + bind(RestController.class).asEagerSingleton(); + catHandlers.bind(binder()); + restHandlers.bind(binder()); + } } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java new file mode 100644 index 00000000000..2c599559920 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -0,0 +1,256 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.settings; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.component.AbstractComponent; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + +/** + * A basic setting service that can be used for per-index and per-cluster settings. + * This service offers transactional application of updates settings. + */ +public abstract class AbstractScopedSettings extends AbstractComponent { + private Settings lastSettingsApplied = Settings.EMPTY; + private final List settingUpdaters = new ArrayList<>(); + private final Map> complexMatchers = new HashMap<>(); + private final Map> keySettings = new HashMap<>(); + private final Setting.Scope scope; + + protected AbstractScopedSettings(Settings settings, Set> settingsSet, Setting.Scope scope) { + super(settings); + for (Setting entry : settingsSet) { + if (entry.getScope() != scope) { + throw new IllegalArgumentException("Setting must be a cluster setting but was: " + entry.getScope()); + } + if (entry.hasComplexMatcher()) { + complexMatchers.put(entry.getKey(), entry); + } else { + keySettings.put(entry.getKey(), entry); + } + } + this.scope = scope; + } + + public Setting.Scope getScope() { + return this.scope; + } + + /** + * Applies the given settings to all listeners and rolls back the result after application. This + * method will not change any settings but will fail if any of the settings can't be applied. 
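
A minimal sketch of the intended call pattern for this service, using the dryRun and applySettings methods defined below; the maxRetries setting and the wiring are assumptions, and in a real node the registration happens through the settings infrastructure rather than by hand:

    import java.util.Collections;

    import org.elasticsearch.common.settings.ClusterSettings;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    class ScopedSettingsSketch {
        static void demo() {
            Setting<Integer> maxRetries = // assumed setting, for illustration
                    new Setting<>("my.max_retries", s -> "3", Integer::parseInt, true, Setting.Scope.CLUSTER);
            ClusterSettings service =
                    new ClusterSettings(Settings.EMPTY, Collections.<Setting<?>>singleton(maxRetries));
            // consumers can only be added for settings that were registered above
            service.addSettingsUpdateConsumer(maxRetries, v -> System.out.println("retries now " + v));
            Settings update = Settings.builder().put("my.max_retries", "7").build();
            service.dryRun(update);        // validates every updater, applies nothing
            service.applySettings(update); // runs all updaters; prints "retries now 7"
        }
    }
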
+ */ + public synchronized Settings dryRun(Settings settings) { + final Settings current = Settings.builder().put(this.settings).put(settings).build(); + final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); + List exceptions = new ArrayList<>(); + for (SettingUpdater settingUpdater : settingUpdaters) { + try { + if (settingUpdater.hasChanged(current, previous)) { + settingUpdater.getValue(current, previous); + } + } catch (RuntimeException ex) { + exceptions.add(ex); + logger.debug("failed to prepareCommit settings for [{}]", ex, settingUpdater); + } + } + // here we are exhaustive and record all settings that failed. + ExceptionsHelper.rethrowAndSuppress(exceptions); + return current; + } + + /** + * Applies the given settings to all the settings consumers or to none of them. The settings + * will be merged with the node settings before they are applied while given settings override existing node + * settings. + * @param newSettings the settings to apply + * @return the unmerged applied settings + */ + public synchronized Settings applySettings(Settings newSettings) { + if (lastSettingsApplied != null && newSettings.equals(lastSettingsApplied)) { + // nothing changed in the settings, ignore + return newSettings; + } + final Settings current = Settings.builder().put(this.settings).put(newSettings).build(); + final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); + try { + List applyRunnables = new ArrayList<>(); + for (SettingUpdater settingUpdater : settingUpdaters) { + try { + applyRunnables.add(settingUpdater.updater(current, previous)); + } catch (Exception ex) { + logger.warn("failed to prepareCommit settings for [{}]", ex, settingUpdater); + throw ex; + } + } + for (Runnable settingUpdater : applyRunnables) { + settingUpdater.run(); + } + } catch (Exception ex) { + logger.warn("failed to apply settings", ex); + throw ex; + } finally { + } + return lastSettingsApplied = newSettings; + } + + /** + * Adds a settings consumer with a predicate that is only evaluated at update time. + *
<p>
+ * Note: Only settings registered in {@link SettingsModule} can be changed dynamically. + *
</p>
+ * @param validator an additional validator that is only applied to updates of this setting. + * This is useful to add additional validation to settings at runtime compared to at startup time. + */ + public synchronized <T> void addSettingsUpdateConsumer(Setting<T> setting, Consumer<T> consumer, Consumer<T> validator) { + if (setting != get(setting.getKey())) { + throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]"); + } + this.settingUpdaters.add(setting.newUpdater(consumer, logger, validator)); + } + + /** + * Adds a settings consumer that accepts the values for two settings. The consumer is only notified if one or both settings change. + *
<p>
+ * Note: Only settings registered in {@link SettingsModule} can be changed dynamically. + *
</p>
+ * This method registers a compound updater that is useful when two settings depend on each other. The consumer is always provided + * with both values even if only one of the two changes. + */ + public synchronized <A, B> void addSettingsUpdateConsumer(Setting<A> a, Setting<B> b, BiConsumer<A, B> consumer) { + if (a != get(a.getKey())) { + throw new IllegalArgumentException("Setting is not registered for key [" + a.getKey() + "]"); + } + if (b != get(b.getKey())) { + throw new IllegalArgumentException("Setting is not registered for key [" + b.getKey() + "]"); + } + this.settingUpdaters.add(Setting.compoundUpdater(consumer, a, b, logger)); + } + + /** + * Adds a settings consumer. + *
<p>
+ * Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. + *
</p>
+ */ + public synchronized void addSettingsUpdateConsumer(Setting setting, Consumer consumer) { + addSettingsUpdateConsumer(setting, consumer, (s) -> {}); + } + + /** + * Transactional interface to update settings. + * @see Setting + */ + public interface SettingUpdater { + + /** + * Returns true if this updaters setting has changed with the current update + * @param current the current settings + * @param previous the previous setting + * @return true if this updaters setting has changed with the current update + */ + boolean hasChanged(Settings current, Settings previous); + + /** + * Returns the instance value for the current settings. This method is stateless and idempotent. + * This method will throw an exception if the source of this value is invalid. + */ + T getValue(Settings current, Settings previous); + + /** + * Applies the given value to the updater. This methods will actually run the update. + */ + void apply(T value, Settings current, Settings previous); + + /** + * Updates this updaters value if it has changed. + * @return true iff the value has been updated. + */ + default boolean apply(Settings current, Settings previous) { + if (hasChanged(current, previous)) { + T value = getValue(current, previous); + apply(value, current, previous); + return true; + } + return false; + } + + /** + * Returns a callable runnable that calls {@link #apply(Object, Settings, Settings)} if the settings + * actually changed. This allows to defer the update to a later point in time while keeping type safety. + * If the value didn't change the returned runnable is a noop. + */ + default Runnable updater(Settings current, Settings previous) { + if (hasChanged(current, previous)) { + T value = getValue(current, previous); + return () -> { apply(value, current, previous);}; + } + return () -> {}; + } + } + + /** + * Returns the {@link Setting} for the given key or null if the setting can not be found. + */ + public Setting get(String key) { + Setting setting = keySettings.get(key); + if (setting == null) { + for (Map.Entry> entry : complexMatchers.entrySet()) { + if (entry.getValue().match(key)) { + return entry.getValue(); + } + } + } else { + return setting; + } + return null; + } + + /** + * Returns true if the setting for the given key is dynamically updateable. Otherwise false. + */ + public boolean hasDynamicSetting(String key) { + final Setting setting = get(key); + return setting != null && setting.isDynamic(); + } + + /** + * Returns a settings object that contains all settings that are not + * already set in the given source. The diff contains either the default value for each + * setting or the settings value in the given default settings. + */ + public Settings diff(Settings source, Settings defaultSettings) { + Settings.Builder builder = Settings.builder(); + for (Setting setting : keySettings.values()) { + if (setting.exists(source) == false) { + builder.put(setting.getKey(), setting.getRaw(defaultSettings)); + } + } + return builder.build(); + } + +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java new file mode 100644 index 00000000000..10c602688a4 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -0,0 +1,153 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
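
To make the SettingUpdater contract above concrete, here is what a hand-rolled implementation could look like; the my.key name is an assumption, and in practice Setting#newUpdater builds these instances rather than user code:

    import java.util.Objects;

    import org.elasticsearch.common.settings.AbstractScopedSettings.SettingUpdater;
    import org.elasticsearch.common.settings.Settings;

    class MyKeyUpdater implements SettingUpdater<Integer> {
        @Override
        public boolean hasChanged(Settings current, Settings previous) {
            return Objects.equals(current.get("my.key"), previous.get("my.key")) == false;
        }

        @Override
        public Integer getValue(Settings current, Settings previous) {
            // may throw; applySettings then fails before any updater has run
            return Integer.parseInt(current.get("my.key", "0"));
        }

        @Override
        public void apply(Integer value, Settings current, Settings previous) {
            System.out.println("my.key is now " + value); // the actual side effect
        }
    }
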
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; +import org.elasticsearch.action.support.DestructiveOperations; +import org.elasticsearch.cluster.InternalClusterInfoService; +import org.elasticsearch.cluster.action.index.MappingUpdatedAction; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; +import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.discovery.zen.elect.ElectMasterService; +import org.elasticsearch.index.store.IndexStoreConfig; +import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; +import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.indices.ttl.IndicesTTLService; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * Encapsulates all valid cluster level settings. 
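
Besides collecting the built-in cluster settings, the subclass below overrides applySettings to route any key under the logger. prefix straight to ESLoggerFactory. A sketch of what that enables, assuming a ClusterSettings instance is at hand:

    // Dynamic log-level changes ride on the regular settings-update path.
    static void raiseDiscoveryLogging(ClusterSettings clusterSettings) {
        clusterSettings.applySettings(Settings.builder()
                .put("logger.org.elasticsearch.discovery", "DEBUG") // per-component logger
                .put("logger._root", "WARN")                        // "_root" addresses the root logger
                .build());
    }
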
+ */ +public final class ClusterSettings extends AbstractScopedSettings { + + public ClusterSettings(Settings settings, Set> settingsSet) { + super(settings, settingsSet, Setting.Scope.CLUSTER); + } + + + @Override + public synchronized Settings applySettings(Settings newSettings) { + Settings settings = super.applySettings(newSettings); + try { + for (Map.Entry entry : settings.getAsMap().entrySet()) { + if (entry.getKey().startsWith("logger.")) { + String component = entry.getKey().substring("logger.".length()); + if ("_root".equals(component)) { + ESLoggerFactory.getRootLogger().setLevel(entry.getValue()); + } else { + ESLoggerFactory.getLogger(component).setLevel(entry.getValue()); + } + } + } + } catch (Exception e) { + logger.warn("failed to refresh settings for [{}]", e, "logger"); + } + return settings; + } + + /** + * Returns true if the settings is a logger setting. + */ + public boolean isLoggerSetting(String key) { + return key.startsWith("logger."); + } + + + public static Set> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, + AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, + BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, + BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, + BalancedShardsAllocator.THRESHOLD_SETTING, + ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, + ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, + EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING, + ZenDiscovery.REJOIN_ON_MASTER_GONE_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, + FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, + IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, + IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, + IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, + MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, + MetaData.SETTING_READ_ONLY_SETTING, + RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, + RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, + RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, + RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING, + RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, + ThreadPool.THREADPOOL_GROUP_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, + ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING, + DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING, + 
InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, + InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, + SnapshotInProgressAllocationDecider.CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING, + DestructiveOperations.REQUIRES_NAME_SETTING, + DiscoverySettings.PUBLISH_TIMEOUT_SETTING, + DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING, + DiscoverySettings.COMMIT_TIMEOUT_SETTING, + DiscoverySettings.NO_MASTER_BLOCK_SETTING, + HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, + InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + SearchService.DEFAULT_SEARCH_TIMEOUT_SETTING, + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, + TransportService.TRACE_LOG_EXCLUDE_SETTING, + TransportService.TRACE_LOG_INCLUDE_SETTING, + TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING, + ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, + InternalClusterService.CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING, + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, + Transport.TRANSPORT_PROFILES_SETTING, + Transport.TRANSPORT_TCP_COMPRESS))); +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java new file mode 100644 index 00000000000..4e9e9f9428b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -0,0 +1,463 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
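
The Setting class added below carries its own default, parser, dynamic flag, and scope; end to end, a typed setting behaves roughly like this (the key, default, and values are assumptions for illustration):

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    class SettingSketch {
        static void demo() {
            // a dynamic, cluster-scoped float setting built from the constructor below
            Setting<Float> boostFactor =
                    new Setting<>("my.boost_factor", s -> "1.0", Float::parseFloat, true, Setting.Scope.CLUSTER);
            float byDefault = boostFactor.get(Settings.EMPTY);                 // 1.0f, from the default function
            float fromNode = boostFactor.get(
                    Settings.builder().put("my.boost_factor", "2.5").build()); // 2.5f
            boolean isSet = boostFactor.exists(Settings.EMPTY);                // false
            String raw = boostFactor.getRaw(Settings.EMPTY);                   // "1.0", the unparsed string
        }
    }
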
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java new file mode 100644 index 00000000000..4e9e9f9428b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -0,0 +1,463 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.MemorySizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.regex.Pattern; + +/** + */ +public class Setting<T> extends ToXContentToBytes { + private final String key; + protected final Function<Settings, String> defaultValue; + private final Function<String, T> parser; + private final boolean dynamic; + private final Scope scope; + + /** + * Creates a new Setting instance + * @param key the settings key for this setting. + * @param defaultValue a default value function that returns the default value's string representation. + * @param parser a parser that parses the string rep into a complex datatype. + * @param dynamic true iff this setting can be dynamically updated + * @param scope the scope of this setting + */ + public Setting(String key, Function<Settings, String> defaultValue, Function<String, T> parser, boolean dynamic, Scope scope) { + assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null"; + this.key = key; + this.defaultValue = defaultValue; + this.parser = parser; + this.dynamic = dynamic; + this.scope = scope; + } + + /** + * Returns the settings key or a prefix if this setting is a group setting + * @see #isGroupSetting() + */ + public final String getKey() { + return key; + } + + /** + * Returns true iff this setting is dynamically updateable, otherwise false + */ + public final boolean isDynamic() { + return dynamic; + } + + /** + * Returns the settings scope + */ + public final Scope getScope() { + return scope; + } + + /** + * Returns true iff this setting is a group setting. Group settings represent a set of settings + * rather than a single value. The key, see {@link #getKey()}, in contrast to non-group settings is a prefix like cluster.store. + * that matches all settings with this prefix. + */ + boolean isGroupSetting() { + return false; + } + + boolean hasComplexMatcher() { + return isGroupSetting(); + } + + /** + * Returns the default value's string representation for this setting. + * @param settings a settings object, for settings that have a default value depending on another setting if available + */ + public final String getDefault(Settings settings) { + return defaultValue.apply(settings); + } + + /** + * Returns true iff this setting is present in the given settings object. Otherwise false + */ + public final boolean exists(Settings settings) { + return settings.get(key) != null; + } + + /** + * Returns the settings value. If the setting is not present in the given settings object the default value is returned + * instead.
+ */ + public T get(Settings settings) { + String value = getRaw(settings); + try { + return parser.apply(value); + } catch (ElasticsearchParseException ex) { + throw new IllegalArgumentException(ex.getMessage(), ex); + } catch (NumberFormatException ex) { + throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + getKey() + "]", ex); + } catch (IllegalArgumentException ex) { + throw ex; + } catch (Exception t) { + throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + getKey() + "]", t); + } + } + + /** + * Returns the raw (string) settings value. If the setting is not present in the given settings object the default value is returned + * instead. This is useful for accessing the actual value when it can't be parsed because it is invalid. + */ + public String getRaw(Settings settings) { + return settings.get(key, defaultValue.apply(settings)); + } + + /** + * Returns true iff the given key matches the settings key, or, for a group setting, iff the + * given key is part of the settings group. + * @see #isGroupSetting() + */ + public boolean match(String toTest) { + return key.equals(toTest); + } + + @Override + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("key", key); + builder.field("type", scope.name()); + builder.field("dynamic", dynamic); + builder.field("is_group_setting", isGroupSetting()); + builder.field("default", defaultValue.apply(Settings.EMPTY)); + builder.endObject(); + return builder; + } + + /** + * The settings scope - settings can either be cluster settings or per index settings. + */ + public enum Scope { + CLUSTER, + INDEX; + }
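The accessors above are easiest to see end to end through one of the factory methods defined further down in this file. A small sketch; the key my.concurrency and its values are made up for illustration, and settingsBuilder() is assumed from this codebase's Settings API:

    // dynamic, cluster-scoped int setting with default 4 and lower bound 1
    Setting<Integer> CONCURRENCY = Setting.intSetting("my.concurrency", 4, 1, true, Setting.Scope.CLUSTER);

    Settings settings = Settings.settingsBuilder().put("my.concurrency", "8").build();
    CONCURRENCY.exists(settings);    // true: explicitly present
    CONCURRENCY.getRaw(settings);    // "8", the unparsed string representation
    CONCURRENCY.get(settings);       // 8, parsed and bounds-checked
    CONCURRENCY.get(Settings.EMPTY); // 4, falls back to the default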
+ final AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, ESLogger logger) { + return newUpdater(consumer, logger, (s) -> {}); + } + + AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, ESLogger logger, Consumer<T> validator) { + if (isDynamic()) { + return new Updater(consumer, logger, validator); + } else { + throw new IllegalStateException("setting [" + getKey() + "] is not dynamic"); + } + } + + /** + * this is used for settings that depend on each other... see {@link org.elasticsearch.common.settings.AbstractScopedSettings#addSettingsUpdateConsumer(Setting, Setting, BiConsumer)} and its + * usage for details. + */ + static <A, B> AbstractScopedSettings.SettingUpdater<Tuple<A, B>> compoundUpdater(final BiConsumer<A, B> consumer, final Setting<A> aSetting, final Setting<B> bSetting, ESLogger logger) { + final AbstractScopedSettings.SettingUpdater<A> aSettingUpdater = aSetting.newUpdater(null, logger); + final AbstractScopedSettings.SettingUpdater<B> bSettingUpdater = bSetting.newUpdater(null, logger); + return new AbstractScopedSettings.SettingUpdater<Tuple<A, B>>() { + @Override + public boolean hasChanged(Settings current, Settings previous) { + return aSettingUpdater.hasChanged(current, previous) || bSettingUpdater.hasChanged(current, previous); + } + + @Override + public Tuple<A, B> getValue(Settings current, Settings previous) { + return new Tuple<>(aSettingUpdater.getValue(current, previous), bSettingUpdater.getValue(current, previous)); + } + + @Override + public void apply(Tuple<A, B> value, Settings current, Settings previous) { + consumer.accept(value.v1(), value.v2()); + } + + @Override + public String toString() { + return "CompoundUpdater for: " + aSettingUpdater + " and " + bSettingUpdater; + } + }; + } + + + private class Updater implements AbstractScopedSettings.SettingUpdater<T> { + private final Consumer<T> consumer; + private final ESLogger logger; + private final Consumer<T> accept; + + public Updater(Consumer<T> consumer, ESLogger logger, Consumer<T> accept) { + this.consumer = consumer; + this.logger = logger; + this.accept = accept; + } + + @Override + public String toString() { + return "Updater for: " + Setting.this.toString(); + } + + @Override + public boolean hasChanged(Settings current, Settings previous) { + final String newValue = getRaw(current); + final String value = getRaw(previous); + assert isGroupSetting() == false : "group settings must override this method"; + assert value != null : "value was null but can't be unless default is null which is invalid"; + + return value.equals(newValue) == false; + } + + @Override + public T getValue(Settings current, Settings previous) { + final String newValue = getRaw(current); + final String value = getRaw(previous); + T inst = get(current); + try { + accept.accept(inst); + } catch (Exception | AssertionError e) { + throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + value + "] to [" + newValue + "]", e); + } + return inst; + } + + @Override + public void apply(T value, Settings current, Settings previous) { + logger.info("update [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current)); + consumer.accept(value); + } + }
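The compound updater is what backs the two-setting addSettingsUpdateConsumer overload referenced in the javadoc above; that method lives on AbstractScopedSettings, which is not part of this hunk. A sketch under that assumption, with two hypothetical interdependent settings and a ClusterSettings instance plus an ESLogger assumed in scope:

    Setting<Integer> LOW = Setting.intSetting("my.low", 10, true, Setting.Scope.CLUSTER);    // hypothetical
    Setting<Integer> HIGH = Setting.intSetting("my.high", 100, true, Setting.Scope.CLUSTER); // hypothetical

    // if either value changes, both current values are handed to the consumer together,
    // so dependent settings are applied as one unit instead of one after the other
    clusterSettings.addSettingsUpdateConsumer(LOW, HIGH,
            (low, high) -> logger.info("bounds updated to [{}]/[{}]", low, high));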
+ + public Setting(String key, String defaultValue, Function<String, T> parser, boolean dynamic, Scope scope) { + this(key, (s) -> defaultValue, parser, dynamic, scope); + } + + public static Setting<Float> floatSetting(String key, float defaultValue, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> Float.toString(defaultValue), Float::parseFloat, dynamic, scope); + } + + public static Setting<Float> floatSetting(String key, float defaultValue, float minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> Float.toString(defaultValue), (s) -> { + float value = Float.parseFloat(s); + if (value < minValue) { + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return value; + }, dynamic, scope); + } + + public static Setting<Integer> intSetting(String key, int defaultValue, int minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, scope); + } + + public static int parseInt(String s, int minValue, String key) { + int value = Integer.parseInt(s); + if (value < minValue) { + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return value; + } + + public static Setting<Integer> intSetting(String key, int defaultValue, boolean dynamic, Scope scope) { + return intSetting(key, defaultValue, Integer.MIN_VALUE, dynamic, scope); + } + + public static Setting<Boolean> boolSetting(String key, boolean defaultValue, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope); + } + + public static Setting<ByteSizeValue> byteSizeSetting(String key, String percentage, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, scope); + } + + public static Setting<ByteSizeValue> byteSizeSetting(String key, ByteSizeValue value, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> value.toString(), (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, scope); + } + + public static Setting<TimeValue> positiveTimeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) { + return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), dynamic, scope); + } + + public static <T> Setting<List<T>> listSetting(String key, List<String> defaultStringValue, Function<String, T> singleValueParser, boolean dynamic, Scope scope) { + return listSetting(key, (s) -> defaultStringValue, singleValueParser, dynamic, scope); + } + public static <T> Setting<List<T>> listSetting(String key, Function<Settings, List<String>> defaultStringValue, Function<String, T> singleValueParser, boolean dynamic, Scope scope) { + Function<String, List<T>> parser = (s) -> { + try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(s)){ + XContentParser.Token token = xContentParser.nextToken(); + if (token != XContentParser.Token.START_ARRAY) { + throw new IllegalArgumentException("expected START_ARRAY but got " + token); + } + ArrayList<T> list = new ArrayList<>(); + while ((token = xContentParser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token != XContentParser.Token.VALUE_STRING) { + throw new IllegalArgumentException("expected VALUE_STRING but got " + token); + } + list.add(singleValueParser.apply(xContentParser.text())); + } + return list; + } catch (IOException e) { + throw new IllegalArgumentException("failed to parse array", e); + } + }; + return new Setting<List<T>>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) { + private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?"); + @Override + public String getRaw(Settings settings) { + String[] array = settings.getAsArray(key, null); + return array == null ?
defaultValue.apply(settings) : arrayToParsableString(array); + } + + public boolean match(String toTest) { + return pattern.matcher(toTest).matches(); + } + + @Override + boolean hasComplexMatcher() { + return true; + } + }; + } + + private static String arrayToParsableString(String[] array) { + try { + XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); + builder.startArray(); + for (String element : array) { + builder.value(element); + } + builder.endArray(); + return builder.string(); + } catch (IOException ex) { + throw new ElasticsearchException(ex); + } + } + + + + public static Setting<Settings> groupSetting(String key, boolean dynamic, Scope scope) { + if (key.endsWith(".") == false) { + throw new IllegalArgumentException("key must end with a '.'"); + } + return new Setting<Settings>(key, "", (s) -> null, dynamic, scope) { + + @Override + public boolean isGroupSetting() { + return true; + } + + @Override + public Settings get(Settings settings) { + return settings.getByPrefix(key); + } + + @Override + public boolean match(String toTest) { + return Regex.simpleMatch(key + "*", toTest); + } + + @Override + public AbstractScopedSettings.SettingUpdater<Settings> newUpdater(Consumer<Settings> consumer, ESLogger logger, Consumer<Settings> validator) { + if (isDynamic() == false) { + throw new IllegalStateException("setting [" + getKey() + "] is not dynamic"); + } + final Setting<Settings> setting = this; + return new AbstractScopedSettings.SettingUpdater<Settings>() { + + @Override + public boolean hasChanged(Settings current, Settings previous) { + Settings currentSettings = get(current); + Settings previousSettings = get(previous); + return currentSettings.equals(previousSettings) == false; + } + + @Override + public Settings getValue(Settings current, Settings previous) { + Settings currentSettings = get(current); + Settings previousSettings = get(previous); + try { + validator.accept(currentSettings); + } catch (Exception | AssertionError e) { + throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" + previousSettings.getAsMap() + "] to [" + currentSettings.getAsMap() + "]", e); + } + return currentSettings; + } + + @Override + public void apply(Settings value, Settings current, Settings previous) { + consumer.accept(value); + } + + @Override + public String toString() { + return "Updater for: " + setting.toString(); + } + }; + } + }; + } + + public static Setting<TimeValue> timeSetting(String key, Function<Settings, String> defaultValue, TimeValue minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, defaultValue, (s) -> { + TimeValue timeValue = TimeValue.parseTimeValue(s, null, key); + if (timeValue.millis() < minValue.millis()) { + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return timeValue; + }, dynamic, scope); + } + + public static Setting<TimeValue> timeSetting(String key, TimeValue defaultValue, TimeValue minValue, boolean dynamic, Scope scope) { + return timeSetting(key, (s) -> defaultValue.getStringRep(), minValue, dynamic, scope); + } + + public static Setting<TimeValue> timeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, defaultValue, key), dynamic, scope); + }
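Two of the factories above deviate from the simple key-equals match: listSetting also matches numbered keys (my.values.0, my.values.1, ...) via its pattern override, and groupSetting treats its key as a prefix. A sketch with hypothetical keys, assuming settingsBuilder() and its putArray helper from this codebase's Settings API:

    // list setting: default [], each element parsed by the given function
    Setting<List<String>> VALUES =
            Setting.listSetting("my.values", Collections.emptyList(), Function.identity(), true, Setting.Scope.CLUSTER);

    // group setting: the key must end with '.', and get() returns everything under the prefix
    Setting<Settings> PROFILES = Setting.groupSetting("my.profiles.", true, Setting.Scope.CLUSTER);

    Settings settings = Settings.settingsBuilder()
            .putArray("my.values", "a", "b")        // surfaces as my.values.0, my.values.1
            .put("my.profiles.client.port", "9500")
            .build();
    VALUES.get(settings);   // [a, b]
    PROFILES.get(settings); // {client.port=9500}, via Settings#getByPrefix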
+ public static Setting<Double> doubleSetting(String key, double defaultValue, double minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> Double.toString(defaultValue), (s) -> { + final double d = Double.parseDouble(s); + if (d < minValue) { + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return d; + }, dynamic, scope); + } + +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 5e083a9e740..989b05d4bf2 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -597,6 +597,8 @@ public final class Settings implements ToXContent { return result.toArray(new String[result.size()]); } + + /** * Returns group settings for the given setting prefix. */ @@ -614,6 +616,9 @@ public final class Settings implements ToXContent { if (settingPrefix.charAt(settingPrefix.length() - 1) != '.') { settingPrefix = settingPrefix + "."; } + return getGroupsInternal(settingPrefix, ignoreNonGrouped); + } + private Map<String, Settings> getGroupsInternal(String settingPrefix, boolean ignoreNonGrouped) throws SettingsException { // we don't really care that it might happen twice Map<String, Map<String, String>> map = new LinkedHashMap<>(); for (Object o : settings.keySet()) { @@ -643,6 +648,16 @@ public final class Settings implements ToXContent { } return Collections.unmodifiableMap(retVal); } + /** + * Returns group settings for the given setting prefix. + */ + public Map<String, Settings> getAsGroups() throws SettingsException { + return getAsGroups(false); + } + + public Map<String, Settings> getAsGroups(boolean ignoreNonGrouped) throws SettingsException { + return getGroupsInternal("", ignoreNonGrouped); + } /** * Returns a parsed version. @@ -706,7 +721,7 @@ public final class Settings implements ToXContent { Builder builder = new Builder(); int numberOfSettings = in.readVInt(); for (int i = 0; i < numberOfSettings; i++) { - builder.put(in.readString(), in.readString()); + builder.put(in.readString(), in.readOptionalString()); } return builder.build(); } @@ -715,7 +730,7 @@ public final class Settings implements ToXContent { out.writeVInt(settings.getAsMap().size()); for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) { out.writeString(entry.getKey()); - out.writeString(entry.getValue()); + out.writeOptionalString(entry.getValue()); } } @@ -818,6 +833,10 @@ public final class Settings implements ToXContent { return this; } + public Builder putNull(String key) { + return put(key, (String) null); + } + /** * Sets a setting with the provided setting key and class as value. *
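The Settings changes above all serve explicit null values: the builder can record one (putNull), the transport serialization round-trips it (writeOptionalString/readOptionalString), and getAsGroups exposes the flat key space as per-prefix groups. A sketch with illustrative keys:

    Settings settings = Settings.settingsBuilder()
            .put("discovery.zen.publish_timeout", "20s")
            .putNull("discovery.zen.commit_timeout") // explicit null instead of silently dropping the key
            .build();

    settings.get("discovery.zen.commit_timeout");    // null value, yet getAsMap() still contains the key

    // group view of the flat key space; with the new no-arg getAsGroups()
    // the grouping prefix is the first key segment (here "discovery")
    Map<String, Settings> groups = settings.getAsGroups();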
diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 2ae4799d9f3..8bc8ce1b651 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -21,6 +21,10 @@ package org.elasticsearch.common.settings; import org.elasticsearch.common.inject.AbstractModule; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; + /** * A module that binds the provided settings to the {@link Settings} interface. * @@ -30,15 +34,36 @@ public class SettingsModule extends AbstractModule { private final Settings settings; private final SettingsFilter settingsFilter; + private final Map<String, Setting<?>> clusterDynamicSettings = new HashMap<>(); + public SettingsModule(Settings settings, SettingsFilter settingsFilter) { this.settings = settings; this.settingsFilter = settingsFilter; + for (Setting<?> setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) { + registerSetting(setting); + } } @Override protected void configure() { bind(Settings.class).toInstance(settings); bind(SettingsFilter.class).toInstance(settingsFilter); + final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(clusterDynamicSettings.values())); + bind(ClusterSettings.class).toInstance(clusterSettings); } -} \ No newline at end of file + + public void registerSetting(Setting<?> setting) { + switch (setting.getScope()) { + case CLUSTER: + if (clusterDynamicSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + clusterDynamicSettings.put(setting.getKey(), setting); + break; + case INDEX: + throw new UnsupportedOperationException("not yet implemented"); + } + } + +}
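registerSetting is how additional settings would join the validated set next to BUILT_IN_CLUSTER_SETTINGS. A sketch of a plugin doing so, assuming the reflective onModule hook and the name()/description() contract of this codebase's Plugin base class; the setting itself is hypothetical:

    public class MyPlugin extends Plugin {
        public static final Setting<Boolean> MY_FLAG_SETTING =
                Setting.boolSetting("my_plugin.flag", false, true, Setting.Scope.CLUSTER);

        @Override
        public String name() { return "my-plugin"; }

        @Override
        public String description() { return "registers a custom dynamic cluster setting"; }

        public void onModule(SettingsModule settingsModule) {
            // a duplicate key, or an INDEX-scoped setting, is rejected here
            settingsModule.registerSetting(MY_FLAG_SETTING);
        }
    }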
diff --git a/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java b/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java index 725c7e56949..9c2f973b96e 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java +++ b/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java @@ -103,9 +103,9 @@ public abstract class XContentSettingsLoader implements SettingsLoader { } else if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NULL) { - // ignore this + serializeValue(settings, sb, path, parser, currentFieldName, true); } else { - serializeValue(settings, sb, path, parser, currentFieldName); + serializeValue(settings, sb, path, parser, currentFieldName, false); } } @@ -126,31 +126,33 @@ public abstract class XContentSettingsLoader implements SettingsLoader { } else if (token == XContentParser.Token.FIELD_NAME) { fieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NULL) { + serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++), true); // ignore } else { - serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++)); + serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++), false); } } } - private void serializeValue(Map<String, String> settings, StringBuilder sb, List<String> path, XContentParser parser, String fieldName) throws IOException { + private void serializeValue(Map<String, String> settings, StringBuilder sb, List<String> path, XContentParser parser, String fieldName, boolean isNull) throws IOException { sb.setLength(0); for (String pathEle : path) { sb.append(pathEle).append('.'); } sb.append(fieldName); String key = sb.toString(); - String currentValue = parser.text(); - String previousValue = settings.put(key, currentValue); - if (previousValue != null) { + String currentValue = isNull ? null : parser.text(); + + if (settings.containsKey(key)) { throw new ElasticsearchParseException( "duplicate settings key [{}] found at line number [{}], column number [{}], previous value [{}], current value [{}]", key, parser.getTokenLocation().lineNumber, parser.getTokenLocation().columnNumber, - previousValue, + settings.get(key), currentValue ); } + settings.put(key, currentValue); } } diff --git a/core/src/main/java/org/elasticsearch/common/text/BytesText.java b/core/src/main/java/org/elasticsearch/common/text/BytesText.java deleted file mode 100644 index d78055db2bd..00000000000 --- a/core/src/main/java/org/elasticsearch/common/text/BytesText.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.common.text; - -import java.nio.charset.StandardCharsets; -import org.elasticsearch.common.bytes.BytesReference; - -/** - * A {@link BytesReference} representation of the text, will always convert on the fly to a {@link String}. - */ -public class BytesText implements Text { - - private BytesReference bytes; - private int hash; - - public BytesText(BytesReference bytes) { - this.bytes = bytes; - } - - @Override - public boolean hasBytes() { - return true; - } - - @Override - public BytesReference bytes() { - return bytes; - } - - @Override - public boolean hasString() { - return false; - } - - @Override - public String string() { - // TODO: we can optimize the conversion based on the bytes reference API similar to UnicodeUtil - if (!bytes.hasArray()) { - bytes = bytes.toBytesArray(); - } - return new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8); - } - - @Override - public String toString() { - return string(); - } - - @Override - public int hashCode() { - if (hash == 0) { - hash = bytes.hashCode(); - } - return hash; - } - - @Override - public boolean equals(Object obj) { - return bytes().equals(((Text) obj).bytes()); - } - - @Override - public int compareTo(Text text) { - return UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder.compare(bytes(), text.bytes()); - } -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/common/text/StringAndBytesText.java b/core/src/main/java/org/elasticsearch/common/text/StringAndBytesText.java deleted file mode 100644 index 36bf76ce441..00000000000 --- a/core/src/main/java/org/elasticsearch/common/text/StringAndBytesText.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership.
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.common.text; - -import java.nio.charset.StandardCharsets; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; - -/** - * Both {@link String} and {@link BytesReference} representation of the text. Starts with one of those, and if - * the other is requests, caches the other one in a local reference so no additional conversion will be needed. - */ -public class StringAndBytesText implements Text { - - public static final Text[] EMPTY_ARRAY = new Text[0]; - - public static Text[] convertFromStringArray(String[] strings) { - if (strings.length == 0) { - return EMPTY_ARRAY; - } - Text[] texts = new Text[strings.length]; - for (int i = 0; i < strings.length; i++) { - texts[i] = new StringAndBytesText(strings[i]); - } - return texts; - } - - private BytesReference bytes; - private String text; - private int hash; - - public StringAndBytesText(BytesReference bytes) { - this.bytes = bytes; - } - - public StringAndBytesText(String text) { - this.text = text; - } - - @Override - public boolean hasBytes() { - return bytes != null; - } - - @Override - public BytesReference bytes() { - if (bytes == null) { - bytes = new BytesArray(text.getBytes(StandardCharsets.UTF_8)); - } - return bytes; - } - - @Override - public boolean hasString() { - return text != null; - } - - @Override - public String string() { - // TODO: we can optimize the conversion based on the bytes reference API similar to UnicodeUtil - if (text == null) { - if (!bytes.hasArray()) { - bytes = bytes.toBytesArray(); - } - text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8); - } - return text; - } - - @Override - public String toString() { - return string(); - } - - @Override - public int hashCode() { - if (hash == 0) { - hash = bytes().hashCode(); - } - return hash; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - return bytes().equals(((Text) obj).bytes()); - } - - @Override - public int compareTo(Text text) { - return UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder.compare(bytes(), text.bytes()); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/text/StringText.java b/core/src/main/java/org/elasticsearch/common/text/StringText.java deleted file mode 100644 index 9d12096b2c0..00000000000 --- a/core/src/main/java/org/elasticsearch/common/text/StringText.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.common.text; - -import java.nio.charset.StandardCharsets; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; - -/** - * A {@link String} only representation of the text. Will always convert to bytes on the fly. - */ -public class StringText implements Text { - - public static final Text[] EMPTY_ARRAY = new Text[0]; - - public static Text[] convertFromStringArray(String[] strings) { - if (strings.length == 0) { - return EMPTY_ARRAY; - } - Text[] texts = new Text[strings.length]; - for (int i = 0; i < strings.length; i++) { - texts[i] = new StringText(strings[i]); - } - return texts; - } - - private final String text; - private int hash; - - public StringText(String text) { - this.text = text; - } - - @Override - public boolean hasBytes() { - return false; - } - - @Override - public BytesReference bytes() { - return new BytesArray(text.getBytes(StandardCharsets.UTF_8)); - } - - @Override - public boolean hasString() { - return true; - } - - @Override - public String string() { - return text; - } - - @Override - public String toString() { - return string(); - } - - @Override - public int hashCode() { - // we use bytes here so we can be consistent with other text implementations - if (hash == 0) { - hash = bytes().hashCode(); - } - return hash; - } - - @Override - public boolean equals(Object obj) { - // we use bytes here so we can be consistent with other text implementations - return bytes().equals(((Text) obj).bytes()); - } - - @Override - public int compareTo(Text text) { - return UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder.compare(bytes(), text.bytes()); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/text/Text.java b/core/src/main/java/org/elasticsearch/common/text/Text.java index 9fe1ea5f35d..6b3b18c443c 100644 --- a/core/src/main/java/org/elasticsearch/common/text/Text.java +++ b/core/src/main/java/org/elasticsearch/common/text/Text.java @@ -18,39 +18,102 @@ */ package org.elasticsearch.common.text; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import java.nio.charset.StandardCharsets; /** - * Text represents a (usually) long text data. We use this abstraction instead of {@link String} - * so we can represent it in a more optimized manner in memory as well as serializing it over the - * network as well as converting it to json format. + * Both {@link String} and {@link BytesReference} representation of the text. Starts with one of those, and if + * the other is requested, caches the other one in a local reference so no additional conversion will be needed.
*/ -public interface Text extends Comparable<Text> { +public final class Text implements Comparable<Text> { + + public static final Text[] EMPTY_ARRAY = new Text[0]; + + public static Text[] convertFromStringArray(String[] strings) { + if (strings.length == 0) { + return EMPTY_ARRAY; + } + Text[] texts = new Text[strings.length]; + for (int i = 0; i < strings.length; i++) { + texts[i] = new Text(strings[i]); + } + return texts; + } + + private BytesReference bytes; + private String text; + private int hash; + + public Text(BytesReference bytes) { + this.bytes = bytes; + } + + public Text(String text) { + this.text = text; + } /** - * Are bytes available without the need to be converted into bytes when calling {@link #bytes()}. + * Whether a {@link BytesReference} view of the data is already materialized. */ - boolean hasBytes(); + public boolean hasBytes() { + return bytes != null; + } /** - * The UTF8 bytes representing the the text, might be converted on the fly, see {@link #hasBytes()} + * Returns a {@link BytesReference} view of the data. */ - BytesReference bytes(); + public BytesReference bytes() { + if (bytes == null) { + bytes = new BytesArray(text.getBytes(StandardCharsets.UTF_8)); + } + return bytes; + } /** - * Is there a {@link String} representation of the text. If not, then it {@link #hasBytes()}. + * Whether a {@link String} view of the data is already materialized. */ - boolean hasString(); + public boolean hasString() { + return text != null; + } /** - * Returns the string representation of the text, might be converted to a string on the fly. + * Returns a {@link String} view of the data. */ - String string(); + public String string() { + if (text == null) { + if (!bytes.hasArray()) { + bytes = bytes.toBytesArray(); + } + text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8); + } + return text; + } - /** - * Returns the string representation of the text, might be converted to a string on the fly.
- */ @Override - String toString(); + public String toString() { + return string(); + } + + @Override + public int hashCode() { + if (hash == 0) { + hash = bytes().hashCode(); + } + return hash; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + return bytes().equals(((Text) obj).bytes()); + } + + @Override + public int compareTo(Text text) { + return UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder.compare(bytes(), text.bytes()); + } }
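With BytesText, StringText and StringAndBytesText collapsed into this single final class, both views are lazy and cached, and equality and ordering are always defined over the UTF-8 bytes. A short sketch using only the API shown above:

    Text fromString = new Text("caf\u00e9");
    fromString.hasBytes();        // false: no bytes view yet
    fromString.bytes();           // encodes to UTF-8 once, then caches
    fromString.hasBytes();        // true

    Text fromBytes = new Text(new BytesArray("caf\u00e9".getBytes(StandardCharsets.UTF_8)));
    fromBytes.string();           // decodes once, then caches
    fromString.equals(fromBytes); // true: compared via the bytes representation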
diff --git a/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java b/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java index e3efa20af18..a5c6171392c 100644 --- a/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java +++ b/core/src/main/java/org/elasticsearch/common/transport/LocalTransportAddress.java @@ -21,8 +21,6 @@ package org.elasticsearch.common.transport; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.transport.local.LocalTransport; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java b/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java index 910b1fc6af2..b5ccda41d15 100644 --- a/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java +++ b/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.transport; -import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; diff --git a/core/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java index 686ad522446..6a99e06ac01 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java @@ -19,11 +19,11 @@ package org.elasticsearch.common.unit; -import java.util.Objects; - import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.util.Objects; + import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; /** Utility methods to get memory sizes. */ diff --git a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java index ee6371605ee..7a542e51022 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java @@ -32,7 +32,6 @@ import org.joda.time.format.PeriodFormat; import org.joda.time.format.PeriodFormatter; import java.io.IOException; -import java.io.Serializable; import java.util.Locale; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -229,6 +228,30 @@ public class TimeValue implements Streamable { return Strings.format1Decimals(value, suffix); } + public String getStringRep() { + if (duration < 0) { + return Long.toString(duration); + } + switch (timeUnit) { + case NANOSECONDS: + return Strings.format1Decimals(duration, "nanos"); + case MICROSECONDS: + return Strings.format1Decimals(duration, "micros"); + case MILLISECONDS: + return Strings.format1Decimals(duration, "ms"); + case SECONDS: + return Strings.format1Decimals(duration, "s"); + case MINUTES: + return Strings.format1Decimals(duration, "m"); + case HOURS: + return Strings.format1Decimals(duration, "h"); + case DAYS: + return Strings.format1Decimals(duration, "d"); + default: + throw new IllegalArgumentException("unknown time unit: " + timeUnit.name()); + } + } + public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue, String settingName) { settingName = Objects.requireNonNull(settingName); assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_TIME_SETTINGS.contains(settingName) : settingName;
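getStringRep exists so a TimeValue can serve as a parseable default: the timeSetting factory earlier in this patch feeds it straight back into parseTimeValue. Unlike toString(), it keeps the unit the value was constructed with, so the round trip preserves the duration. A sketch (the setting name is hypothetical):

    TimeValue original = TimeValue.timeValueSeconds(30);
    String rep = original.getStringRep();              // "30s", same unit as constructed
    TimeValue parsed = TimeValue.parseTimeValue(rep, null, "my.setting");
    // parsed describes the same duration as original; a negative duration
    // (e.g. a -1 sentinel) is emitted as the bare number instead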
diff --git a/core/src/main/java/org/elasticsearch/common/util/BytesRefHash.java b/core/src/main/java/org/elasticsearch/common/util/BytesRefHash.java index 5a2e21e5e11..ee29df51b55 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BytesRefHash.java +++ b/core/src/main/java/org/elasticsearch/common/util/BytesRefHash.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; import com.carrotsearch.hppc.BitMixer; - import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; diff --git a/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java b/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java index a605d66e80d..d45afead715 100644 --- a/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java +++ b/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java index a36c37b22e9..739677342f7 100644 --- a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java +++ b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java @@ -23,9 +23,22 @@ import com.carrotsearch.hppc.DoubleArrayList; import com.carrotsearch.hppc.FloatArrayList; import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.ObjectArrayList; -import org.apache.lucene.util.*; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefArray; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.InPlaceMergeSorter; +import org.apache.lucene.util.IntroSorter; -import java.util.*; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; +import java.util.RandomAccess; /** Collections-related utility methods. */ public class CollectionUtils { diff --git a/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java b/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java index d25113a54bb..17ae43b1449 100644 --- a/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java +++ b/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java @@ -24,7 +24,11 @@ import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Settings; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; /** * This class defines an official elasticsearch extension point. It registers diff --git a/core/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java b/core/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java index 63b7b23a62b..4095f5d7014 100644 --- a/core/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java +++ b/core/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lease.Releasables; import java.util.Iterator; diff --git a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java index 2cbc8cbdf99..8d049003824 100644 --- a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java +++ b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java @@ -42,9 +42,19 @@ import org.elasticsearch.index.shard.ShardStateMetaData; import java.io.IOException; import java.io.PrintStream; import java.nio.charset.StandardCharsets; -import java.nio.file.*; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.FileVisitResult; +import java.nio.file.FileVisitor; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; /** */ diff --git a/core/src/main/java/org/elasticsearch/common/util/SingleObjectCache.java b/core/src/main/java/org/elasticsearch/common/util/SingleObjectCache.java index 3d7f00747c1..f3d710dab8c 100644 --- a/core/src/main/java/org/elasticsearch/common/util/SingleObjectCache.java +++ b/core/src/main/java/org/elasticsearch/common/util/SingleObjectCache.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; import org.elasticsearch.common.unit.TimeValue; -import java.io.IOException; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; diff --git
a/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java index 6d2216d9daa..5fcb1a8f152 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java @@ -23,7 +23,11 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.transport.Transports; import java.util.Objects; -import java.util.concurrent.*; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.locks.AbstractQueuedSynchronizer; public abstract class BaseFuture<V> implements Future<V> { diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 46fd7507f8b..140f026c357 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -22,7 +22,12 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.settings.Settings; import java.util.Arrays; -import java.util.concurrent.*; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedTransferQueue; +import java.util.concurrent.SynchronousQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java index 09d644e664f..50d6df9a6a7 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.unit.TimeValue; /** * @@ -69,4 +68,4 @@ public abstract class PrioritizedRunnable implements Runnable, Comparable<PrioritizedRunnable> diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java @@ private void ensureNumberConversion(boolean coeerce, long result, Class<? extends Number> clazz) { if (!coeerce) { - //Need to throw type IllegalArgumentException as current catch logic in + //Need to throw type IllegalArgumentException as current catch logic in //NumberFieldMapper.parseCreateField relies on this for "malformed" value detection throw new IllegalArgumentException(clazz.getSimpleName() + " value passed as String"); } } - + // The 3rd party parsers we rely on are known to silently truncate fractions: see // http://fasterxml.github.io/jackson-core/javadoc/2.3.0/com/fasterxml/jackson/core/JsonParser.html#getShortValue() // If this behaviour is flagged as undesirable and any truncation occurs @@ -120,7 +124,7 @@ return intValue(DEFAULT_NUMBER_COEERCE_POLICY); } - + @Override public int intValue(boolean coerce) throws IOException { Token token = currentToken(); @@ -130,7 +134,7 @@ } int result = doIntValue(); ensureNumberConversion(coerce, result, Integer.class); - return result; + return result; } protected abstract
int doIntValue() throws IOException; @@ -139,7 +143,7 @@ public abstract class AbstractXContentParser implements XContentParser { public long longValue() throws IOException { return longValue(DEFAULT_NUMBER_COEERCE_POLICY); } - + @Override public long longValue(boolean coerce) throws IOException { Token token = currentToken(); @@ -149,7 +153,7 @@ public abstract class AbstractXContentParser implements XContentParser { } long result = doLongValue(); ensureNumberConversion(coerce, result, Long.class); - return result; + return result; } protected abstract long doLongValue() throws IOException; @@ -158,7 +162,7 @@ public abstract class AbstractXContentParser implements XContentParser { public float floatValue() throws IOException { return floatValue(DEFAULT_NUMBER_COEERCE_POLICY); } - + @Override public float floatValue(boolean coerce) throws IOException { Token token = currentToken(); @@ -171,7 +175,7 @@ public abstract class AbstractXContentParser implements XContentParser { protected abstract float doFloatValue() throws IOException; - + @Override public double doubleValue() throws IOException { return doubleValue(DEFAULT_NUMBER_COEERCE_POLICY); @@ -190,7 +194,7 @@ public abstract class AbstractXContentParser implements XContentParser { protected abstract double doDoubleValue() throws IOException; @Override - public String textOrNull() throws IOException { + public final String textOrNull() throws IOException { if (currentToken() == Token.VALUE_NULL) { return null; } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index c24ddb7f296..994df37de30 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -24,9 +24,16 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentGenerator; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; -import java.io.*; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Reader; /** * A YAML based content implementation using Jackson. 
diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 20f2c96b120..6689d9c8688 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -23,9 +23,10 @@ import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.rest.RestStatus; import java.util.EnumSet; @@ -35,42 +36,40 @@ import java.util.EnumSet; */ public class DiscoverySettings extends AbstractComponent { + public final static int NO_MASTER_BLOCK_ID = 2; + public final static ClusterBlock NO_MASTER_BLOCK_ALL = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); + public final static ClusterBlock NO_MASTER_BLOCK_WRITES = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, false, RestStatus.SERVICE_UNAVAILABLE, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); /** * sets the timeout for a complete publishing cycle, including both sending and committing. the master * will continue to process the next cluster state update after this time has elapsed **/ - public static final String PUBLISH_TIMEOUT = "discovery.zen.publish_timeout"; + public static final Setting<TimeValue> PUBLISH_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); /** * sets the timeout for receiving enough acks for a specific cluster state and committing it. failing * to receive responses within this window will cause the cluster state change to be rejected.
*/ - public static final String COMMIT_TIMEOUT = "discovery.zen.commit_timeout"; - public static final String NO_MASTER_BLOCK = "discovery.zen.no_master_block"; - public static final String PUBLISH_DIFF_ENABLE = "discovery.zen.publish_diff.enable"; - - public static final TimeValue DEFAULT_PUBLISH_TIMEOUT = TimeValue.timeValueSeconds(30); - public static final TimeValue DEFAULT_COMMIT_TIMEOUT = TimeValue.timeValueSeconds(30); - public static final String DEFAULT_NO_MASTER_BLOCK = "write"; - public final static int NO_MASTER_BLOCK_ID = 2; - public final static boolean DEFAULT_PUBLISH_DIFF_ENABLE = true; - - public final static ClusterBlock NO_MASTER_BLOCK_ALL = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); - public final static ClusterBlock NO_MASTER_BLOCK_WRITES = new ClusterBlock(NO_MASTER_BLOCK_ID, "no master", true, false, RestStatus.SERVICE_UNAVAILABLE, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); + public static final Setting<TimeValue> COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, Setting.Scope.CLUSTER); + public static final Setting<ClusterBlock> NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.CLUSTER); + public static final Setting<Boolean> PUBLISH_DIFF_ENABLE_SETTING = Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, Setting.Scope.CLUSTER); private volatile ClusterBlock noMasterBlock; private volatile TimeValue publishTimeout; + private volatile TimeValue commitTimeout; private volatile boolean publishDiff; @Inject - public DiscoverySettings(Settings settings, NodeSettingsService nodeSettingsService) { + public DiscoverySettings(Settings settings, ClusterSettings clusterSettings) { super(settings); - nodeSettingsService.addListener(new ApplySettings()); - this.noMasterBlock = parseNoMasterBlock(settings.get(NO_MASTER_BLOCK, DEFAULT_NO_MASTER_BLOCK)); - this.publishTimeout = settings.getAsTime(PUBLISH_TIMEOUT, DEFAULT_PUBLISH_TIMEOUT); - this.commitTimeout = settings.getAsTime(COMMIT_TIMEOUT, new TimeValue(Math.min(DEFAULT_COMMIT_TIMEOUT.millis(), publishTimeout.millis()))); - this.publishDiff = settings.getAsBoolean(PUBLISH_DIFF_ENABLE, DEFAULT_PUBLISH_DIFF_ENABLE); + clusterSettings.addSettingsUpdateConsumer(NO_MASTER_BLOCK_SETTING, this::setNoMasterBlock); + clusterSettings.addSettingsUpdateConsumer(PUBLISH_DIFF_ENABLE_SETTING, this::setPublishDiff); + clusterSettings.addSettingsUpdateConsumer(COMMIT_TIMEOUT_SETTING, this::setCommitTimeout); + clusterSettings.addSettingsUpdateConsumer(PUBLISH_TIMEOUT_SETTING, this::setPublishTimeout); + this.noMasterBlock = NO_MASTER_BLOCK_SETTING.get(settings); + this.publishTimeout = PUBLISH_TIMEOUT_SETTING.get(settings); + this.commitTimeout = COMMIT_TIMEOUT_SETTING.get(settings); + this.publishDiff = PUBLISH_DIFF_ENABLE_SETTING.get(settings); } /** @@ -88,47 +87,25 @@ public class DiscoverySettings extends AbstractComponent { return noMasterBlock; } - public boolean getPublishDiff() { return publishDiff;} - - private class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - TimeValue newPublishTimeout = settings.getAsTime(PUBLISH_TIMEOUT, null); - if (newPublishTimeout != null) { - if (newPublishTimeout.millis() !=
publishTimeout.millis()) { - logger.info("updating [{}] from [{}] to [{}]", PUBLISH_TIMEOUT, publishTimeout, newPublishTimeout); - publishTimeout = newPublishTimeout; - if (settings.getAsTime(COMMIT_TIMEOUT, null) == null && commitTimeout.millis() > publishTimeout.millis()) { - logger.info("reducing default [{}] to [{}] due to publish timeout change", COMMIT_TIMEOUT, publishTimeout); - commitTimeout = publishTimeout; - } - } - } - TimeValue newCommitTimeout = settings.getAsTime(COMMIT_TIMEOUT, null); - if (newCommitTimeout != null) { - if (newCommitTimeout.millis() != commitTimeout.millis()) { - logger.info("updating [{}] from [{}] to [{}]", COMMIT_TIMEOUT, commitTimeout, newCommitTimeout); - commitTimeout = newCommitTimeout; - } - } - String newNoMasterBlockValue = settings.get(NO_MASTER_BLOCK); - if (newNoMasterBlockValue != null) { - ClusterBlock newNoMasterBlock = parseNoMasterBlock(newNoMasterBlockValue); - if (newNoMasterBlock != noMasterBlock) { - noMasterBlock = newNoMasterBlock; - } - } - Boolean newPublishDiff = settings.getAsBoolean(PUBLISH_DIFF_ENABLE, null); - if (newPublishDiff != null) { - if (newPublishDiff != publishDiff) { - logger.info("updating [{}] from [{}] to [{}]", PUBLISH_DIFF_ENABLE, publishDiff, newPublishDiff); - publishDiff = newPublishDiff; - } - } - } + private void setNoMasterBlock(ClusterBlock noMasterBlock) { + this.noMasterBlock = noMasterBlock; } - private ClusterBlock parseNoMasterBlock(String value) { + private void setPublishDiff(boolean publishDiff) { + this.publishDiff = publishDiff; + } + + private void setPublishTimeout(TimeValue publishTimeout) { + this.publishTimeout = publishTimeout; + } + + private void setCommitTimeout(TimeValue commitTimeout) { + this.commitTimeout = commitTimeout; + } + + public boolean getPublishDiff() { return publishDiff;} + + private static ClusterBlock parseNoMasterBlock(String value) { switch (value) { case "all": return NO_MASTER_BLOCK_ALL;
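The rewrite above is the template for migrating off NodeSettingsService: initial values come from Setting#get, and dynamic updates arrive through typed consumers instead of a hand-rolled diffing listener. A condensed sketch of the same pattern for a hypothetical component:

    public class MyService extends AbstractComponent {
        // hypothetical dynamic, cluster-scoped setting
        public static final Setting<Boolean> MY_ENABLED_SETTING =
                Setting.boolSetting("my.service.enabled", true, true, Setting.Scope.CLUSTER);

        private volatile boolean enabled;

        @Inject
        public MyService(Settings settings, ClusterSettings clusterSettings) {
            super(settings);
            this.enabled = MY_ENABLED_SETTING.get(settings); // initial value or default
            clusterSettings.addSettingsUpdateConsumer(MY_ENABLED_SETTING, this::setEnabled);
        }

        private void setEnabled(boolean enabled) {
            this.enabled = enabled; // parsing and validation already happened upstream
        }
    }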
org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.discovery.DiscoveryStats; +import org.elasticsearch.discovery.InitialStateDiscoveryListener; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.transport.TransportService; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java index 9cec672ad43..19a2cf06bf4 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java @@ -36,7 +36,11 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.membership.MembershipAction; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 03111d141ef..6398f31a8fd 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -22,7 +22,12 @@ package org.elasticsearch.discovery.zen; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -39,6 +44,8 @@ import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; @@ -55,12 +62,20 @@ import org.elasticsearch.discovery.zen.ping.ZenPingService; import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import 
java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -74,7 +89,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
  */
 public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery, PingContextProvider {

-    public final static String SETTING_REJOIN_ON_MASTER_GONE = "discovery.zen.rejoin_on_master_gone";
+    public final static Setting<Boolean> REJOIN_ON_MASTER_GONE_SETTING = Setting.boolSetting("discovery.zen.rejoin_on_master_gone", true, true, Setting.Scope.CLUSTER);
     public final static String SETTING_PING_TIMEOUT = "discovery.zen.ping_timeout";
     public final static String SETTING_JOIN_TIMEOUT = "discovery.zen.join_timeout";
     public final static String SETTING_JOIN_RETRY_ATTEMPTS = "discovery.zen.join_retry_attempts";
@@ -139,7 +154,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
     @Inject
     public ZenDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool,
-                        TransportService transportService, final ClusterService clusterService, NodeSettingsService nodeSettingsService,
+                        TransportService transportService, final ClusterService clusterService, ClusterSettings clusterSettings,
                         ZenPingService pingService, ElectMasterService electMasterService,
                         DiscoverySettings discoverySettings) {
         super(settings);
@@ -160,7 +175,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
         this.masterElectionFilterClientNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_CLIENT, true);
         this.masterElectionFilterDataNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_DATA, false);
         this.masterElectionWaitForJoinsTimeout = settings.getAsTime(SETTING_MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT, TimeValue.timeValueMillis(joinTimeout.millis() / 2));
-        this.rejoinOnMasterGone = settings.getAsBoolean(SETTING_REJOIN_ON_MASTER_GONE, true);
+        this.rejoinOnMasterGone = REJOIN_ON_MASTER_GONE_SETTING.get(settings);

         if (this.joinRetryAttempts < 1) {
             throw new IllegalArgumentException("'" + SETTING_JOIN_RETRY_ATTEMPTS + "' must be a positive number.
got [" + SETTING_JOIN_RETRY_ATTEMPTS + "]"); @@ -171,7 +186,14 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen logger.debug("using ping_timeout [{}], join.timeout [{}], master_election.filter_client [{}], master_election.filter_data [{}]", this.pingTimeout, joinTimeout, masterElectionFilterClientNodes, masterElectionFilterDataNodes); - nodeSettingsService.addListener(new ApplySettings()); + clusterSettings.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> { + final ClusterState clusterState = clusterService.state(); + int masterNodes = clusterState.nodes().masterNodes().size(); + if (value > masterNodes) { + throw new IllegalArgumentException("cannot set " + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " to more than the current master nodes count [" + masterNodes + "]"); + } + }); + clusterSettings.addSettingsUpdateConsumer(REJOIN_ON_MASTER_GONE_SETTING, this::setRejoingOnMasterGone); this.masterFD = new MasterFaultDetection(settings, threadPool, transportService, clusterName, clusterService); this.masterFD.addListener(new MasterNodeFailureListener()); @@ -306,6 +328,10 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return clusterJoinsCounter.get() > 0; } + private void setRejoingOnMasterGone(boolean rejoin) { + this.rejoinOnMasterGone = rejoin; + } + /** end of {@link org.elasticsearch.discovery.zen.ping.PingContextProvider } implementation */ @@ -824,8 +850,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } } - void handleJoinRequest(final DiscoveryNode node, final MembershipAction.JoinCallback callback) { - + void handleJoinRequest(final DiscoveryNode node, final ClusterState state, final MembershipAction.JoinCallback callback) { if (!transportService.addressSupported(node.address().getClass())) { // TODO, what should we do now? Maybe inform that node that its crap? logger.warn("received a wrong address type from [{}], ignoring...", node); @@ -837,7 +862,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen // Sanity check: maybe we don't end up here, because serialization may have failed. 
if (node.getVersion().before(minimumNodeJoinVersion)) { callback.onFailure( - new IllegalStateException("Can't handle join request from a node with a version [" + node.getVersion() + "] that is lower than the minimum compatible version [" + minimumNodeJoinVersion.minimumCompatibilityVersion() + "]") + new IllegalStateException("Can't handle join request from a node with a version [" + node.getVersion() + "] that is lower than the minimum compatible version [" + minimumNodeJoinVersion.minimumCompatibilityVersion() + "]") ); return; } @@ -847,7 +872,13 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen // validate the join request, will throw a failure if it fails, which will get back to the // node calling the join request - membership.sendValidateJoinRequestBlocking(node, joinTimeout); + try { + membership.sendValidateJoinRequestBlocking(node, state, joinTimeout); + } catch (Throwable e) { + logger.warn("failed to validate incoming join request from node [{}]", node); + callback.onFailure(new IllegalStateException("failure when sending a validation request to node", e)); + return; + } nodeJoinController.handleJoinRequest(node, callback); } } @@ -1027,7 +1058,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen private class MembershipListener implements MembershipAction.MembershipListener { @Override public void onJoin(DiscoveryNode node, MembershipAction.JoinCallback callback) { - handleJoinRequest(node, callback); + handleJoinRequest(node, clusterService.state(), callback); } @Override @@ -1139,26 +1170,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - int minimumMasterNodes = settings.getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, - ZenDiscovery.this.electMaster.minimumMasterNodes()); - if (minimumMasterNodes != ZenDiscovery.this.electMaster.minimumMasterNodes()) { - logger.info("updating {} from [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, - ZenDiscovery.this.electMaster.minimumMasterNodes(), minimumMasterNodes); - handleMinimumMasterNodesChanged(minimumMasterNodes); - } - - boolean rejoinOnMasterGone = settings.getAsBoolean(SETTING_REJOIN_ON_MASTER_GONE, ZenDiscovery.this.rejoinOnMasterGone); - if (rejoinOnMasterGone != ZenDiscovery.this.rejoinOnMasterGone) { - logger.info("updating {} from [{}] to [{}]", SETTING_REJOIN_ON_MASTER_GONE, ZenDiscovery.this.rejoinOnMasterGone, rejoinOnMasterGone); - ZenDiscovery.this.rejoinOnMasterGone = rejoinOnMasterGone; - } - } - } - - /** * All control of the join thread should happen under the cluster state update task thread. 
 * This is important to make sure that the background joining process is always in sync with any cluster state updates
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java
index 9164a85388a..9cca1edfc5e 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java
@@ -22,11 +22,10 @@ package org.elasticsearch.discovery.zen.elect;
 import com.carrotsearch.hppc.ObjectContainer;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.settings.Validator;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.CollectionUtils;
@@ -41,23 +40,7 @@ import java.util.List;
  */
 public class ElectMasterService extends AbstractComponent {

-    public static final String DISCOVERY_ZEN_MINIMUM_MASTER_NODES = "discovery.zen.minimum_master_nodes";
-    public static final Validator DISCOVERY_ZEN_MINIMUM_MASTER_NODES_VALIDATOR = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            int intValue;
-            try {
-                intValue = Integer.parseInt(value);
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as an integer";
-            }
-            int masterNodes = clusterState.nodes().masterNodes().size();
-            if (intValue > masterNodes) {
-                return "cannot set " + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES + " to more than the current master nodes count [" + masterNodes + "]";
-            }
-            return null;
-        }
-    };
+    public static final Setting<Integer> DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING = Setting.intSetting("discovery.zen.minimum_master_nodes", -1, true, Setting.Scope.CLUSTER);

     // This is the minimum version a master needs to be on, otherwise it gets ignored
     // This is based on the minimum compatible version of the current version this node is on
@@ -70,7 +53,7 @@ public class ElectMasterService extends AbstractComponent {
     public ElectMasterService(Settings settings, Version version) {
         super(settings);
         this.minMasterVersion = version.minimumCompatibilityVersion();
-        this.minimumMasterNodes = settings.getAsInt(DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1);
+        this.minimumMasterNodes = DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.get(settings);
         logger.debug("using minimum_master_nodes [{}]", minimumMasterNodes);
     }
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java
index 8842bafb116..73be1d3bb28 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java
@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateUpdateTask;
+import org.elasticsearch.cluster.NotMasterException;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import
org.elasticsearch.common.Nullable; @@ -31,9 +32,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.concurrent.CopyOnWriteArrayList; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java index 2abe730b1e8..9386ff6356e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java @@ -28,7 +28,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.concurrent.ConcurrentMap; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java index 4260b992ddb..04af8207c37 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.discovery.zen.membership; import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,7 +29,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import 
org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.concurrent.TimeUnit; @@ -88,10 +94,6 @@ public class MembershipAction extends AbstractComponent { transportService.submitRequest(masterNode, DISCOVERY_LEAVE_ACTION_NAME, new LeaveRequest(node), EmptyTransportResponseHandler.INSTANCE_SAME).txGet(timeout.millis(), TimeUnit.MILLISECONDS); } - public void sendJoinRequest(DiscoveryNode masterNode, DiscoveryNode node) { - transportService.sendRequest(masterNode, DISCOVERY_JOIN_ACTION_NAME, new JoinRequest(node), EmptyTransportResponseHandler.INSTANCE_SAME); - } - public void sendJoinRequestBlocking(DiscoveryNode masterNode, DiscoveryNode node, TimeValue timeout) { transportService.submitRequest(masterNode, DISCOVERY_JOIN_ACTION_NAME, new JoinRequest(node), EmptyTransportResponseHandler.INSTANCE_SAME) .txGet(timeout.millis(), TimeUnit.MILLISECONDS); @@ -100,8 +102,8 @@ public class MembershipAction extends AbstractComponent { /** * Validates the join request, throwing a failure if it failed. */ - public void sendValidateJoinRequestBlocking(DiscoveryNode node, TimeValue timeout) { - transportService.submitRequest(node, DISCOVERY_JOIN_VALIDATE_ACTION_NAME, new ValidateJoinRequest(), EmptyTransportResponseHandler.INSTANCE_SAME) + public void sendValidateJoinRequestBlocking(DiscoveryNode node, ClusterState state, TimeValue timeout) { + transportService.submitRequest(node, DISCOVERY_JOIN_VALIDATE_ACTION_NAME, new ValidateJoinRequest(state), EmptyTransportResponseHandler.INSTANCE_SAME) .txGet(timeout.millis(), TimeUnit.MILLISECONDS); } @@ -156,9 +158,26 @@ public class MembershipAction extends AbstractComponent { } } - public static class ValidateJoinRequest extends TransportRequest { + class ValidateJoinRequest extends TransportRequest { + private ClusterState state; - public ValidateJoinRequest() { + ValidateJoinRequest() { + } + + ValidateJoinRequest(ClusterState state) { + this.state = state; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.state = ClusterState.Builder.readFrom(in, nodesProvider.nodes().localNode()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + this.state.writeTo(out); } } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java index 18f734f7136..f97b31861ec 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java @@ -19,7 +19,6 @@ package org.elasticsearch.discovery.zen.ping; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.LifecycleComponent; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java index 91fd622023f..acb5f640db0 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java @@ -21,7 +21,11 @@ package org.elasticsearch.discovery.zen.publish; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import 
org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.Diff; +import org.elasticsearch.cluster.IncompatibleClusterStateVersionException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.bytes.BytesReference; @@ -40,10 +44,22 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BytesTransportRequest; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 3a1b430f98b..93e95dfaa96 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -21,7 +21,12 @@ package org.elasticsearch.env; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.Lock; +import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.store.NativeFSLockFactory; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -31,6 +36,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -38,11 +44,25 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.fs.FsProbe; +import org.elasticsearch.monitor.jvm.JvmInfo; import java.io.Closeable; import java.io.IOException; -import java.nio.file.*; -import java.util.*; +import java.nio.file.AtomicMoveNotSupportedException; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.ArrayList; +import java.util.Arrays; +import 
java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -145,7 +165,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) { Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(NODES_FOLDER).resolve(Integer.toString(possibleLockId)); Files.createDirectories(dir); - + try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) { logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); try { @@ -187,6 +207,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { } maybeLogPathDetails(); + maybeLogHeapDetails(); if (settings.getAsBoolean(SETTING_ENABLE_LUCENE_SEGMENT_INFOS_TRACE, false)) { SegmentInfos.setInfoStream(System.out); @@ -274,6 +295,13 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { } } + private void maybeLogHeapDetails() { + JvmInfo jvmInfo = JvmInfo.jvmInfo(); + ByteSizeValue maxHeapSize = jvmInfo.getMem().getHeapMax(); + String useCompressedOops = jvmInfo.useCompressedOops(); + logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, useCompressedOops); + } + private static String toString(Collection items) { StringBuilder b = new StringBuilder(); for(String item : items) { @@ -811,7 +839,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { // Sanity check: assert Integer.parseInt(shardPath.getName(count-1).toString()) >= 0; assert "indices".equals(shardPath.getName(count-3).toString()); - + return shardPath.getParent().getParent().getParent(); } } diff --git a/core/src/main/java/org/elasticsearch/env/ShardLock.java b/core/src/main/java/org/elasticsearch/env/ShardLock.java index 13561f89ab0..4ff1237ba20 100644 --- a/core/src/main/java/org/elasticsearch/env/ShardLock.java +++ b/core/src/main/java/org/elasticsearch/env/ShardLock.java @@ -19,13 +19,10 @@ package org.elasticsearch.env; -import org.apache.lucene.store.Lock; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.index.shard.ShardId; import java.io.Closeable; import java.io.IOException; -import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; /** diff --git a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java index 4c72894969e..4524222d5a3 100644 --- a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java +++ b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java @@ -19,7 +19,6 @@ package org.elasticsearch.gateway; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java index e89cd6c8577..bbb2670e194 100644 --- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -24,7 +24,11 @@ import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.util.IOUtils; import 
org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractComponent; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index 0850064f320..acd650bc6f7 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingService; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index e83ec695a96..80e3be78093 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -20,7 +20,11 @@ package org.elasticsearch.gateway; import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -227,7 +231,7 @@ public class GatewayService extends AbstractLifecycleComponent i // automatically generate a UID for the metadata if we need to metaDataBuilder.generateClusterUuidIfNeeded(); - if (recoveredState.metaData().settings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || currentState.metaData().settings().getAsBoolean(MetaData.SETTING_READ_ONLY, false)) { + if (MetaData.SETTING_READ_ONLY_SETTING.get(recoveredState.metaData().settings()) || MetaData.SETTING_READ_ONLY_SETTING.get(currentState.metaData().settings())) { blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK); } diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index 9ac1768522c..5c7b31de23a 100644 --- a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -22,19 +22,31 @@ import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; 
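
The `GatewayService` hunk above swaps `settings.getAsBoolean(MetaData.SETTING_READ_ONLY, false)` for the typed `MetaData.SETTING_READ_ONLY_SETTING.get(...)`, so the default lives with the setting definition instead of being repeated at every call site. A small sketch of the difference; the `ReadOnlyCheck` class is invented, and the `cluster.blocks.read_only` key is assumed to be the one behind `MetaData.SETTING_READ_ONLY`:

---------------------------------------------------------------------------
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class ReadOnlyCheck {
    // declared once, default included
    public static final Setting<Boolean> READ_ONLY_SETTING =
            Setting.boolSetting("cluster.blocks.read_only", false, true, Setting.Scope.CLUSTER);

    public static void main(String[] args) {
        Settings settings = Settings.builder().put("cluster.blocks.read_only", true).build();
        // old style: the default is repeated (and can drift) at each call site
        boolean oldStyle = settings.getAsBoolean("cluster.blocks.read_only", false);
        // new style: the Setting owns parsing and the default
        boolean newStyle = READ_ONLY_SETTING.get(settings);
        System.out.println(oldStyle + " == " + newStyle);
    }
}
---------------------------------------------------------------------------
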
+import org.apache.lucene.store.OutputStreamIndexOutput; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; -import java.nio.file.*; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Collection; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index e560b4458b7..83eaa791485 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -20,6 +20,7 @@ package org.elasticsearch.gateway; import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -30,8 +31,16 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; /** * The primary shard allocator allocates primary shard that were not created as @@ -39,6 +48,7 @@ import java.util.*; */ public abstract class PrimaryShardAllocator extends AbstractComponent { + @Deprecated public static final String INDEX_RECOVERY_INITIAL_SHARDS = "index.recovery.initial_shards"; private final String initialShards; @@ -56,13 +66,21 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator(); while (unassignedIterator.hasNext()) { - ShardRouting shard = unassignedIterator.next(); + final ShardRouting shard = unassignedIterator.next(); - if (needToFindPrimaryCopy(shard) == false) { + if (shard.primary() == false) { continue; } - AsyncShardFetch.FetchResult shardState = fetchData(shard, allocation); + final IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings, Collections.emptyList()); + + if (shard.allocatedPostIndexCreate(indexMetaData) == false) { + // when we create a fresh index + continue; + } + + final AsyncShardFetch.FetchResult 
shardState = fetchData(shard, allocation);
             if (shardState.hasData() == false) {
                 logger.trace("{}: ignoring allocation, still fetching shard started state", shard);
                 allocation.setHasPendingAsyncFetch();
@@ -70,25 +88,50 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
                 continue;
             }

-            IndexMetaData indexMetaData = metaData.index(shard.getIndex());
-            Settings indexSettings = Settings.builder().put(settings).put(indexMetaData.getSettings()).build();
+            final Set<String> lastActiveAllocationIds = indexMetaData.activeAllocationIds(shard.id());
+            final boolean snapshotRestore = shard.restoreSource() != null;
+            final boolean recoverOnAnyNode = recoverOnAnyNode(indexSettings);

-            NodesAndVersions nodesAndVersions = buildNodesAndVersions(shard, recoverOnAnyNode(indexSettings), allocation.getIgnoreNodes(shard.shardId()), shardState);
-            logger.debug("[{}][{}] found {} allocations of {}, highest version: [{}]", shard.index(), shard.id(), nodesAndVersions.allocationsFound, shard, nodesAndVersions.highestVersion);
+            final NodesAndVersions nodesAndVersions;
+            final boolean enoughAllocationsFound;

-            if (isEnoughAllocationsFound(shard, indexMetaData, nodesAndVersions) == false) {
-                // if we are restoring this shard we still can allocate
-                if (shard.restoreSource() == null) {
+            if (lastActiveAllocationIds.isEmpty()) {
+                assert indexSettings.getIndexVersionCreated().before(Version.V_3_0_0) : "trying to allocate a primary with an empty allocation id set, but index is new";
+                // when we load an old index (after upgrading cluster) or restore a snapshot of an old index
+                // fall back to old version-based allocation mode
+                // Note that once the shard has been active, lastActiveAllocationIds will be non-empty
+                nodesAndVersions = buildNodesAndVersions(shard, snapshotRestore || recoverOnAnyNode, allocation.getIgnoreNodes(shard.shardId()), shardState);
+                if (snapshotRestore || recoverOnAnyNode) {
+                    enoughAllocationsFound = nodesAndVersions.allocationsFound > 0;
+                } else {
+                    enoughAllocationsFound = isEnoughVersionBasedAllocationsFound(shard, indexMetaData, nodesAndVersions);
+                }
+                logger.debug("[{}][{}]: version-based allocation for pre-{} index found {} allocations of {}, highest version: [{}]", shard.index(), shard.id(), Version.V_3_0_0, nodesAndVersions.allocationsFound, shard, nodesAndVersions.highestVersion);
+            } else {
+                assert lastActiveAllocationIds.isEmpty() == false;
+                // use allocation ids to select nodes
+                nodesAndVersions = buildAllocationIdBasedNodes(shard, snapshotRestore || recoverOnAnyNode,
+                        allocation.getIgnoreNodes(shard.shardId()), lastActiveAllocationIds, shardState);
+                enoughAllocationsFound = nodesAndVersions.allocationsFound > 0;
+                logger.debug("[{}][{}]: found {} allocations of {} based on allocation ids: [{}]", shard.index(), shard.id(), nodesAndVersions.allocationsFound, shard, lastActiveAllocationIds);
+            }
+
+            if (enoughAllocationsFound == false) {
+                if (snapshotRestore) {
+                    // let BalancedShardsAllocator take care of allocating this shard
+                    logger.debug("[{}][{}]: missing local data, will restore from [{}]", shard.index(), shard.id(), shard.restoreSource());
+                } else if (recoverOnAnyNode) {
+                    // let BalancedShardsAllocator take care of allocating this shard
+                    logger.debug("[{}][{}]: missing local data, recover from any node", shard.index(), shard.id());
+                } else {
                     // we can't really allocate, so ignore it and continue
                     unassignedIterator.removeAndIgnore();
                     logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}]", shard.index(), shard.id(),
nodesAndVersions.allocationsFound); - } else { - logger.debug("[{}][{}]: missing local data, will restore from [{}]", shard.index(), shard.id(), shard.restoreSource()); } continue; } - NodesToAllocate nodesToAllocate = buildNodesToAllocate(shard, allocation, nodesAndVersions); + final NodesToAllocate nodesToAllocate = buildNodesToAllocate(shard, allocation, nodesAndVersions.nodes); if (nodesToAllocate.yesNodes.isEmpty() == false) { DiscoveryNode node = nodesToAllocate.yesNodes.get(0); logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, node); @@ -109,63 +152,99 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { } /** - * Does the shard need to find a primary copy? + * Builds a list of nodes. If matchAnyShard is set to false, only nodes that have an allocation id matching + * lastActiveAllocationIds are added to the list. Otherwise, any node that has a shard is added to the list, but + * entries with matching allocation id are always at the front of the list. */ - boolean needToFindPrimaryCopy(ShardRouting shard) { - if (shard.primary() == false) { - return false; + protected NodesAndVersions buildAllocationIdBasedNodes(ShardRouting shard, boolean matchAnyShard, Set ignoreNodes, + Set lastActiveAllocationIds, AsyncShardFetch.FetchResult shardState) { + List matchingNodes = new ArrayList<>(); + List nonMatchingNodes = new ArrayList<>(); + long highestVersion = -1; + for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) { + DiscoveryNode node = nodeShardState.getNode(); + String allocationId = nodeShardState.allocationId(); + + if (ignoreNodes.contains(node.id())) { + continue; + } + + if (nodeShardState.storeException() == null) { + if (allocationId == null && nodeShardState.version() != -1) { + // old shard with no allocation id, assign dummy value so that it gets added below in case of matchAnyShard + allocationId = "_n/a_"; + } + + logger.trace("[{}] on node [{}] has allocation id [{}] of shard", shard, nodeShardState.getNode(), allocationId); + } else { + logger.trace("[{}] on node [{}] has allocation id [{}] but the store can not be opened, treating as no allocation id", nodeShardState.storeException(), shard, nodeShardState.getNode(), allocationId); + allocationId = null; + } + + if (allocationId != null) { + if (lastActiveAllocationIds.contains(allocationId)) { + matchingNodes.add(node); + highestVersion = Math.max(highestVersion, nodeShardState.version()); + } else if (matchAnyShard) { + nonMatchingNodes.add(node); + highestVersion = Math.max(highestVersion, nodeShardState.version()); + } + } } - // this is an API allocation, ignore since we know there is no data... 
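
The `buildAllocationIdBasedNodes` logic above boils down to a stable partition: nodes holding a copy whose allocation id is in the last-active set come first, and, when `matchAnyShard` is set, other copies follow as fallbacks rather than being dropped. A self-contained sketch of that ordering, with plain strings standing in for `DiscoveryNode` and the fetched shard states:

---------------------------------------------------------------------------
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class AllocationIdOrdering {
    /** Nodes with a matching allocation id first; non-matching ones only if matchAnyShard. */
    static List<String> candidates(Map<String, String> allocationIdByNode,
                                   Set<String> lastActive, boolean matchAnyShard) {
        List<String> matching = new ArrayList<>();
        List<String> nonMatching = new ArrayList<>();
        for (Map.Entry<String, String> e : allocationIdByNode.entrySet()) {
            if (lastActive.contains(e.getValue())) {
                matching.add(e.getKey());
            } else if (matchAnyShard) {
                nonMatching.add(e.getKey());
            }
        }
        List<String> nodes = new ArrayList<>(matching); // preferred copies first
        nodes.addAll(nonMatching);                      // stale copies as fallback
        return nodes;
    }

    public static void main(String[] args) {
        Map<String, String> copies = new LinkedHashMap<>();
        copies.put("node1", "alloc-a"); // stale copy
        copies.put("node2", "alloc-b"); // copy that was active per the cluster state
        Set<String> lastActive = new HashSet<>(Arrays.asList("alloc-b"));
        System.out.println(candidates(copies, lastActive, true)); // [node2, node1]
    }
}
---------------------------------------------------------------------------
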
-        if (shard.allocatedPostIndexCreate() == false) {
-            return false;
-        }
+        List<DiscoveryNode> nodes = new ArrayList<>();
+        nodes.addAll(matchingNodes);
+        nodes.addAll(nonMatchingNodes);

-        return true;
+        if (logger.isTraceEnabled()) {
+            logger.trace("{} candidates for allocation: {}", shard, nodes.stream().map(DiscoveryNode::name).collect(Collectors.joining(", ")));
+        }
+        return new NodesAndVersions(nodes, nodes.size(), highestVersion);
     }

-    private boolean isEnoughAllocationsFound(ShardRouting shard, IndexMetaData indexMetaData, NodesAndVersions nodesAndVersions) {
+    /**
+     * used by old version-based allocation
+     */
+    private boolean isEnoughVersionBasedAllocationsFound(ShardRouting shard, IndexMetaData indexMetaData, NodesAndVersions nodesAndVersions) {
         // check if the counts meets the minimum set
         int requiredAllocation = 1;
         // if we restore from a repository one copy is more then enough
-        if (shard.restoreSource() == null) {
-            try {
-                String initialShards = indexMetaData.getSettings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards));
-                if ("quorum".equals(initialShards)) {
-                    if (indexMetaData.getNumberOfReplicas() > 1) {
-                        requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1;
-                    }
-                } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
-                    if (indexMetaData.getNumberOfReplicas() > 2) {
-                        requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2);
-                    }
-                } else if ("one".equals(initialShards)) {
-                    requiredAllocation = 1;
-                } else if ("full".equals(initialShards) || "all".equals(initialShards)) {
-                    requiredAllocation = indexMetaData.getNumberOfReplicas() + 1;
-                } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
-                    if (indexMetaData.getNumberOfReplicas() > 1) {
-                        requiredAllocation = indexMetaData.getNumberOfReplicas();
-                    }
-                } else {
-                    requiredAllocation = Integer.parseInt(initialShards);
+        try {
+            String initialShards = indexMetaData.getSettings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards));
+            if ("quorum".equals(initialShards)) {
+                if (indexMetaData.getNumberOfReplicas() > 1) {
+                    requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1;
                 }
-            } catch (Exception e) {
-                logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard);
+            } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
+                if (indexMetaData.getNumberOfReplicas() > 2) {
+                    requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2);
+                }
+            } else if ("one".equals(initialShards)) {
+                requiredAllocation = 1;
+            } else if ("full".equals(initialShards) || "all".equals(initialShards)) {
+                requiredAllocation = indexMetaData.getNumberOfReplicas() + 1;
+            } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
+                if (indexMetaData.getNumberOfReplicas() > 1) {
+                    requiredAllocation = indexMetaData.getNumberOfReplicas();
+                }
+            } else {
+                requiredAllocation = Integer.parseInt(initialShards);
             }
+        } catch (Exception e) {
+            logger.warn("[{}][{}] failed to derive initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard);
         }

         return nodesAndVersions.allocationsFound >= requiredAllocation;
     }

     /**
-     * Based on the nodes and versions, build the list of yes/no/throttle nodes that the shard applies to.
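
The `initial_shards` branch ladder above derives how many started copies must be found before an old, version-based primary may be allocated. The arithmetic is easy to misread, so here is the same mapping as a pure function, mirroring the branches above rather than the actual Elasticsearch types:

---------------------------------------------------------------------------
public class InitialShards {
    /** Required number of found copies for a shard with the given replica count. */
    static int requiredAllocations(String initialShards, int numberOfReplicas) {
        int copies = numberOfReplicas + 1; // primary plus replicas
        switch (initialShards) {
            case "quorum":
                return numberOfReplicas > 1 ? (copies / 2) + 1 : 1;
            case "quorum-1":
            case "half":
                return numberOfReplicas > 2 ? copies / 2 : 1;
            case "one":
                return 1;
            case "full":
            case "all":
                return copies;
            case "full-1":
            case "all-1":
                return numberOfReplicas > 1 ? numberOfReplicas : 1;
            default:
                return Integer.parseInt(initialShards); // explicit numeric override
        }
    }

    public static void main(String[] args) {
        // with 2 replicas (3 copies total), a quorum is 2 of 3
        System.out.println(requiredAllocations("quorum", 2)); // prints 2
    }
}
---------------------------------------------------------------------------
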
+ * Split the list of nodes to lists of yes/no/throttle nodes based on allocation deciders */ - private NodesToAllocate buildNodesToAllocate(ShardRouting shard, RoutingAllocation allocation, NodesAndVersions nodesAndVersions) { + private NodesToAllocate buildNodesToAllocate(ShardRouting shard, RoutingAllocation allocation, List nodes) { List yesNodes = new ArrayList<>(); List throttledNodes = new ArrayList<>(); List noNodes = new ArrayList<>(); - for (DiscoveryNode discoNode : nodesAndVersions.nodes) { + for (DiscoveryNode discoNode : nodes) { RoutingNode node = allocation.routingNodes().node(discoNode.id()); if (node == null) { continue; @@ -184,9 +263,11 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { } /** - * Builds a list of nodes and version + * Builds a list of nodes. If matchAnyShard is set to false, only nodes that have the highest shard version + * are added to the list. Otherwise, any node that has a shard is added to the list, but entries with highest + * version are always at the front of the list. */ - NodesAndVersions buildNodesAndVersions(ShardRouting shard, boolean recoveryOnAnyNode, Set ignoreNodes, + NodesAndVersions buildNodesAndVersions(ShardRouting shard, boolean matchAnyShard, Set ignoreNodes, AsyncShardFetch.FetchResult shardState) { final Map nodesWithVersion = new HashMap<>(); int numberOfAllocationsFound = 0; @@ -208,20 +289,15 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { version = -1; } - if (recoveryOnAnyNode) { - numberOfAllocationsFound++; - if (version > highestVersion) { - highestVersion = version; - } - // We always put the node without clearing the map - nodesWithVersion.put(node, version); - } else if (version != -1) { + if (version != -1) { numberOfAllocationsFound++; // If we've found a new "best" candidate, clear the // current candidates and add it if (version > highestVersion) { highestVersion = version; - nodesWithVersion.clear(); + if (matchAnyShard == false) { + nodesWithVersion.clear(); + } nodesWithVersion.put(node, version); } else if (version == highestVersion) { // If the candidate is the same, add it to the @@ -258,9 +334,9 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { * Return {@code true} if the index is configured to allow shards to be * recovered on any node */ - private boolean recoverOnAnyNode(Settings idxSettings) { - return IndexMetaData.isOnSharedFilesystem(idxSettings) && - idxSettings.getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false); + private boolean recoverOnAnyNode(IndexSettings indexSettings) { + return indexSettings.isOnSharedFilesystem() + && indexSettings.getSettings().getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false); } protected abstract AsyncShardFetch.FetchResult fetchData(ShardRouting shard, RoutingAllocation allocation); diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index c87f4d94755..0b5f2bc58d9 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -24,6 +24,8 @@ import com.carrotsearch.hppc.ObjectLongMap; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectLongCursor; import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import 
org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -56,6 +58,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { */ public boolean processExistingRecoveries(RoutingAllocation allocation) { boolean changed = false; + MetaData metaData = allocation.metaData(); for (RoutingNodes.RoutingNodesIterator nodes = allocation.routingNodes().nodes(); nodes.hasNext(); ) { nodes.next(); for (RoutingNodes.RoutingNodeIterator it = nodes.nodeShards(); it.hasNext(); ) { @@ -69,8 +72,10 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { if (shard.relocatingNodeId() != null) { continue; } + // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... - if (shard.allocatedPostIndexCreate() == false) { + IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + if (shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } @@ -114,6 +119,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { boolean changed = false; final RoutingNodes routingNodes = allocation.routingNodes(); final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator(); + MetaData metaData = allocation.metaData(); while (unassignedIterator.hasNext()) { ShardRouting shard = unassignedIterator.next(); if (shard.primary()) { @@ -121,7 +127,8 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { } // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... - if (shard.allocatedPostIndexCreate() == false) { + IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + if (shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index d91b4bd8cdd..27ee0c17dab 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; @@ -139,7 +138,8 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction Store.tryOpenIndex(shardPath.resolveIndex()); } catch (Exception exception) { logger.trace("{} can't open index for shard [{}] in path [{}]", exception, shardId, shardStateMetaData, (shardPath != null) ? shardPath.resolveIndex() : ""); - return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, exception); + String allocationId = shardStateMetaData.allocationId != null ? 
shardStateMetaData.allocationId.getId() : null; + return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId, exception); } } // old shard metadata doesn't have the actual index UUID so we need to check if the actual uuid in the metadata @@ -149,11 +149,12 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction logger.warn("{} shard state info found but indexUUID didn't match expected [{}] actual [{}]", shardId, indexUUID, shardStateMetaData.indexUUID); } else { logger.debug("{} shard state info found: [{}]", shardId, shardStateMetaData); - return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version); + String allocationId = shardStateMetaData.allocationId != null ? shardStateMetaData.allocationId.getId() : null; + return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId); } } logger.trace("{} no local shard info found", shardId); - return new NodeGatewayStartedShards(clusterService.localNode(), -1); + return new NodeGatewayStartedShards(clusterService.localNode(), -1, null); } catch (Exception e) { throw new ElasticsearchException("failed to load started shards", e); } @@ -277,17 +278,19 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction public static class NodeGatewayStartedShards extends BaseNodeResponse { private long version = -1; + private String allocationId = null; private Throwable storeException = null; public NodeGatewayStartedShards() { } - public NodeGatewayStartedShards(DiscoveryNode node, long version) { - this(node, version, null); + public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId) { + this(node, version, allocationId, null); } - public NodeGatewayStartedShards(DiscoveryNode node, long version, Throwable storeException) { + public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId, Throwable storeException) { super(node); this.version = version; + this.allocationId = allocationId; this.storeException = storeException; } @@ -295,6 +298,10 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction return this.version; } + public String allocationId() { + return this.allocationId; + } + public Throwable storeException() { return this.storeException; } @@ -303,16 +310,17 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction public void readFrom(StreamInput in) throws IOException { super.readFrom(in); version = in.readLong(); + allocationId = in.readOptionalString(); if (in.readBoolean()) { storeException = in.readThrowable(); } - } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeLong(version); + out.writeOptionalString(allocationId); if (storeException != null) { out.writeBoolean(true); out.writeThrowable(storeException); diff --git a/core/src/main/java/org/elasticsearch/http/HttpServerModule.java b/core/src/main/java/org/elasticsearch/http/HttpServerModule.java deleted file mode 100644 index 49d67369643..00000000000 --- a/core/src/main/java/org/elasticsearch/http/HttpServerModule.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.http; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.netty.NettyHttpServerTransport; - -import java.util.Objects; - -/** - * - */ -public class HttpServerModule extends AbstractModule { - - private final Settings settings; - private final ESLogger logger; - - private Class httpServerTransportClass; - - public HttpServerModule(Settings settings) { - this.settings = settings; - this.logger = Loggers.getLogger(getClass(), settings); - this.httpServerTransportClass = NettyHttpServerTransport.class; - } - - @SuppressWarnings({"unchecked"}) - @Override - protected void configure() { - bind(HttpServerTransport.class).to(httpServerTransportClass).asEagerSingleton(); - bind(HttpServer.class).asEagerSingleton(); - } - - public void setHttpServerTransport(Class httpServerTransport, String source) { - Objects.requireNonNull(httpServerTransport, "Configured http server transport may not be null"); - Objects.requireNonNull(source, "Plugin, that changes transport may not be null"); - logger.info("Using [{}] as http transport, overridden by [{}]", httpServerTransportClass.getName(), source); - this.httpServerTransportClass = httpServerTransport; - } -} diff --git a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java b/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java index 2eb57f187fa..5c05efcd170 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java +++ b/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java @@ -21,7 +21,11 @@ package org.elasticsearch.http.netty; import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent; import org.elasticsearch.rest.support.RestUtils; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.ChannelHandler; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ExceptionEvent; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.handler.codec.http.HttpRequest; import java.util.regex.Pattern; diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java index 19946c27342..7fcc7b65fba 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java @@ -32,16 +32,36 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.support.RestUtils; import org.jboss.netty.buffer.ChannelBuffer; -import org.jboss.netty.channel.*; -import org.jboss.netty.handler.codec.http.*; +import 
org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.ChannelFutureListener; +import org.jboss.netty.handler.codec.http.Cookie; +import org.jboss.netty.handler.codec.http.CookieDecoder; +import org.jboss.netty.handler.codec.http.CookieEncoder; +import org.jboss.netty.handler.codec.http.DefaultHttpResponse; +import org.jboss.netty.handler.codec.http.HttpHeaders; +import org.jboss.netty.handler.codec.http.HttpMethod; +import org.jboss.netty.handler.codec.http.HttpResponseStatus; +import org.jboss.netty.handler.codec.http.HttpVersion; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; -import static org.elasticsearch.http.netty.NettyHttpServerTransport.*; -import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.*; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_CREDENTIALS; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_HEADERS; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_METHODS; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ENABLED; +import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_MAX_AGE; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_CREDENTIALS; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_HEADERS; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_METHODS; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_ORIGIN; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_MAX_AGE; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ORIGIN; +import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.USER_AGENT; /** * diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index ebc655ae4a5..899bbdc86e2 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -29,17 +29,35 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.*; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.NetworkExceptionHelper; +import org.elasticsearch.common.transport.PortsRange; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.http.*; +import org.elasticsearch.http.BindHttpException; +import org.elasticsearch.http.HttpChannel; +import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.http.HttpRequest; +import org.elasticsearch.http.HttpServerAdapter; +import 
org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.netty.pipelining.HttpPipeliningHandler; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.transport.BindTransportException; import org.jboss.netty.bootstrap.ServerBootstrap; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ChannelPipeline; +import org.jboss.netty.channel.ChannelPipelineFactory; +import org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.ExceptionEvent; +import org.jboss.netty.channel.FixedReceiveBufferSizePredictorFactory; +import org.jboss.netty.channel.ReceiveBufferSizePredictorFactory; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory; import org.jboss.netty.handler.codec.http.HttpChunkAggregator; @@ -56,7 +74,15 @@ import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.*; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_SERVER; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_RECEIVE_BUFFER_SIZE; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_SEND_BUFFER_SIZE; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_KEEP_ALIVE; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_NO_DELAY; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_REUSE_ADDRESS; +import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; /** @@ -285,7 +311,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent lastException = new AtomicReference<>(); diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java b/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java index 10008c76a54..c291e591dc1 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java +++ b/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java @@ -1,12 +1,37 @@ package org.elasticsearch.http.netty.pipelining; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; -import org.jboss.netty.channel.*; -import org.jboss.netty.handler.codec.http.DefaultHttpRequest; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// this file is from netty-http-pipelining, under apache 2.0 license +// see github.com/typesafehub/netty-http-pipelining + +import org.jboss.netty.channel.ChannelEvent; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelHandler; import org.jboss.netty.handler.codec.http.HttpRequest; -import java.util.*; +import java.util.Comparator; +import java.util.PriorityQueue; +import java.util.Queue; /** * Implements HTTP pipelining ordering, ensuring that responses are completely served in the same order as their diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java index 6b713a08020..2485b7082bd 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java +++ b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java @@ -1,6 +1,32 @@ package org.elasticsearch.http.netty.pipelining; -import org.jboss.netty.channel.*; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// this file is from netty-http-pipelining, under apache 2.0 license +// see github.com/typesafehub/netty-http-pipelining + +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelEvent; +import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.DownstreamMessageEvent; /** * Permits downstream channel events to be ordered and signalled as to whether more are to come for a given sequence. diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java index 7343b29b6c5..cc47b5be320 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java +++ b/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java @@ -1,5 +1,27 @@ package org.elasticsearch.http.netty.pipelining; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// this file is from netty-http-pipelining, under apache 2.0 license +// see github.com/typesafehub/netty-http-pipelining + import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.UpstreamMessageEvent; diff --git a/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java b/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java index 8244f633939..b155a436108 100644 --- a/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java +++ b/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java @@ -22,7 +22,6 @@ package org.elasticsearch.index; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; /** * @@ -50,4 +49,4 @@ public abstract class AbstractIndexComponent implements IndexComponent { public IndexSettings getIndexSettings() { return indexSettings; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java index f4ddd75ba85..1ee1f1cc4a9 100644 --- a/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java +++ b/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java @@ -24,9 +24,9 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardState; -import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.ShardId; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 231baaefaab..68e7b2672fb 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -38,7 +38,11 @@ import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.mapper.MapperRegistry; import java.io.IOException; -import java.util.*; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.function.BiFunction; import java.util.function.Consumer; diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 92ca00231b5..100b8b7ae81 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ 
b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -22,6 +22,7 @@ package org.elasticsearch.index; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.IOUtils; import org.elasticsearch.cluster.metadata.AliasMetaData; @@ -29,7 +30,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -40,6 +40,7 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.query.QueryCache; +import org.elasticsearch.index.engine.EngineClosedException; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -48,13 +49,21 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.index.shard.IndexSearcherWrapper; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShadowIndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; +import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.AliasFilterParsingException; import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.IOException; @@ -73,7 +82,7 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; /** * */ -public final class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable{ +public final class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable { private final IndexEventListener eventListener; private final AnalysisService analysisService; @@ -93,7 +102,6 @@ public final class IndexService extends AbstractIndexComponent implements IndexC private final AtomicBoolean deleted = new AtomicBoolean(false); private final IndexSettings indexSettings; - @Inject public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv, SimilarityService similarityService, ShardStoreDeleter shardStoreDeleter, @@ -146,7 +154,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC */ @Nullable public IndexShard getShardOrNull(int shardId) { - return shards.get(shardId); + return shards.get(shardId); } /** @@ -160,13 +168,17 @@ public final class IndexService 
extends AbstractIndexComponent implements IndexC return indexShard; } - public Set shardIds() { return shards.keySet(); } + public Set shardIds() { + return shards.keySet(); + } public IndexCache cache() { return indexCache; } - public IndexFieldDataService fieldData() { return indexFieldData; } + public IndexFieldDataService fieldData() { + return indexFieldData; + } public AnalysisService analysisService() { return this.analysisService; @@ -207,7 +219,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC private long getAvgShardSizeInBytes() throws IOException { long sum = 0; int count = 0; - for(IndexShard indexShard : this) { + for (IndexShard indexShard : this) { sum += indexShard.store().stats().sizeInBytes(); count++; } @@ -254,17 +266,17 @@ public final class IndexService extends AbstractIndexComponent implements IndexC // TODO: we should, instead, hold a "bytes reserved" of how large we anticipate this shard will be, e.g. for a shard // that's being relocated/replicated we know how large it will become once it's done copying: // Count up how many shards are currently on each data path: - Map dataPathToShardCount = new HashMap<>(); - for(IndexShard shard : this) { + Map dataPathToShardCount = new HashMap<>(); + for (IndexShard shard : this) { Path dataPath = shard.shardPath().getRootStatePath(); Integer curCount = dataPathToShardCount.get(dataPath); if (curCount == null) { curCount = 0; } - dataPathToShardCount.put(dataPath, curCount+1); + dataPathToShardCount.put(dataPath, curCount + 1); } path = ShardPath.selectNewPathForShard(nodeEnv, shardId, this.indexSettings, routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE ? getAvgShardSizeInBytes() : routing.getExpectedShardSize(), - dataPathToShardCount); + dataPathToShardCount); logger.debug("{} creating using a new path [{}]", shardId, path); } else { logger.debug("{} creating using an existing path [{}]", shardId, path); @@ -277,7 +289,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC logger.debug("creating shard_id {}", shardId); // if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary. final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false || - (primary && IndexMetaData.isOnSharedFilesystem(indexSettings)); + (primary && IndexMetaData.isOnSharedFilesystem(indexSettings)); store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> nodeServicesProvider.getIndicesQueryCache().onClose(shardId))); if (useShadowEngine(primary, indexSettings)) { indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, nodeServicesProvider); @@ -288,6 +300,10 @@ public final class IndexService extends AbstractIndexComponent implements IndexC eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created"); eventListener.afterIndexShardCreated(indexShard); indexShard.updateRoutingEntry(routing, true); + if (shards.isEmpty() && this.indexSettings.getTranslogSyncInterval().millis() != 0) { + ThreadPool threadPool = nodeServicesProvider.getThreadPool(); + new AsyncTranslogFSync(this, threadPool).schedule(); // kick this off if we are the first shard in this service. 
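+                // (editorial note, not part of the patch) the guard above fires only for the
+                // first shard created in this index service, and only when the configured sync
+                // interval is non-zero; the task then keeps rescheduling itself (see the hunk
+                // below) until the index service is closed or has no shards left.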
+ } shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap(); success = true; return indexShard; @@ -443,25 +459,26 @@ public final class IndexService extends AbstractIndexComponent implements IndexC } @Override - public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { + public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) { if (shardId != null) { final IndexShard shard = indexService.getShardOrNull(shardId.id()); if (shard != null) { - shard.fieldData().onCache(shardId, fieldNames, fieldDataType, ramUsage); + shard.fieldData().onCache(shardId, fieldName, fieldDataType, ramUsage); } } } @Override - public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { if (shardId != null) { final IndexShard shard = indexService.getShardOrNull(shardId.id()); if (shard != null) { - shard.fieldData().onRemoval(shardId, fieldNames, fieldDataType, wasEvicted, sizeInBytes); + shard.fieldData().onRemoval(shardId, fieldName, fieldDataType, wasEvicted, sizeInBytes); } } } } + /** * Returns the filter associated with listed filtering aliases. *

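Editorial aside before the next hunk: the +573,57 block below adds maybeFSyncTranslogs() plus the self-rescheduling AsyncTranslogFSync task that drives it. The scheduling pattern deserves spelling out, so here is a minimal sketch under assumptions, using plain java.util.concurrent in place of the Elasticsearch ThreadPool and with illustrative names throughout.

```java
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

// Sketch of a self-rescheduling periodic task: each run re-arms the timer itself,
// so a slow pass can never overlap the next one, and the loop dies naturally
// once the owner is closed.
class PeriodicTranslogSync implements Runnable {
    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    private final AtomicBoolean closed = new AtomicBoolean(false);
    private final long intervalMillis;

    PeriodicTranslogSync(long intervalMillis) {
        this.intervalMillis = intervalMillis;
    }

    void schedule() {
        // one-shot scheduling instead of scheduleAtFixedRate: the next run is armed
        // only after the current one decides it is still needed
        scheduler.schedule(this, intervalMillis, TimeUnit.MILLISECONDS);
    }

    boolean mustRun() {
        return closed.get() == false;
    }

    @Override
    public void run() {
        if (mustRun()) {
            syncShards();       // in the real change: fsync each shard translog that reports syncNeeded()
            if (mustRun()) {
                schedule();     // re-arm only while the task is still needed
            }
        }
    }

    void syncShards() { /* illustrative placeholder for the fsync pass */ }

    void close() {
        closed.set(true);
        scheduler.shutdown();
    }
}
```

The real task adds refinements visible in the hunk: the fsync pass runs on the FLUSH thread pool rather than on the scheduler thread, it is a no-op unless the index durability is ASYNC, and it quietly skips shards whose engine has already closed. Relatedly, the IndexSettings hunk that follows parses index.translog.durability leniently, logging a warning and keeping a valid value instead of failing on an illegal setting.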
@@ -556,5 +573,57 @@ public final class IndexService extends AbstractIndexComponent implements IndexC return indexStore; } // pkg private for testing + private void maybeFSyncTranslogs() { + if (indexSettings.getTranslogDurability() == Translog.Durability.ASYNC) { + for (IndexShard shard : this.shards.values()) { + try { + Translog translog = shard.getTranslog(); + if (translog.syncNeeded()) { + translog.sync(); + } + } catch (EngineClosedException | AlreadyClosedException ex) { + // fine - continue; + } catch (IOException e) { + logger.warn("failed to sync translog", e); + } + } + } + } + + + /** + * FSyncs the translog for all shards of this index in a defined interval. + */ + final static class AsyncTranslogFSync implements Runnable { + private final IndexService indexService; + private final ThreadPool threadPool; + + AsyncTranslogFSync(IndexService indexService, ThreadPool threadPool) { + this.indexService = indexService; + this.threadPool = threadPool; + } + + boolean mustRun() { + // don't re-schedule if its closed or if we dont' have a single shard here..., we are done + return (indexService.closed.get() || indexService.shards.isEmpty()) == false; + } + + void schedule() { + threadPool.schedule(indexService.getIndexSettings().getTranslogSyncInterval(), ThreadPool.Names.SAME, AsyncTranslogFSync.this); + } + + @Override + public void run() { + if (mustRun()) { + threadPool.executor(ThreadPool.Names.FLUSH).execute(() -> { + indexService.maybeFSyncTranslogs(); + if (mustRun()) { + schedule(); + } + }); + } + } + } + } diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index f0e06ea0bc7..772fb053cda 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -25,12 +25,16 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.translog.Translog; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.function.Consumer; import java.util.function.Predicate; @@ -48,6 +52,9 @@ public final class IndexSettings { public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard"; public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard"; public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields"; + public static final String INDEX_TRANSLOG_SYNC_INTERVAL = "index.translog.sync_interval"; + public static final String INDEX_TRANSLOG_DURABILITY = "index.translog.durability"; + private final String uuid; private final List> updateListeners; private final Index index; @@ -67,6 +74,8 @@ public final class IndexSettings { private final boolean queryStringAllowLeadingWildcard; private final boolean defaultAllowUnmappedFields; private final Predicate indexNameMatcher; + private volatile Translog.Durability durability; + private final TimeValue syncInterval; /** * Returns the default search field for this index. 
@@ -127,7 +136,7 @@ public final class IndexSettings { public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection> updateListeners, final Predicate indexNameMatcher) { this.nodeSettings = nodeSettings; this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build(); - this.updateListeners = Collections.unmodifiableList(new ArrayList<>(updateListeners)); + this.updateListeners = Collections.unmodifiableList( new ArrayList<>(updateListeners)); this.index = new Index(indexMetaData.getIndex()); version = Version.indexCreated(settings); uuid = settings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); @@ -144,6 +153,10 @@ public final class IndexSettings { this.parseFieldMatcher = new ParseFieldMatcher(settings); this.defaultAllowUnmappedFields = settings.getAsBoolean(ALLOW_UNMAPPED, true); this.indexNameMatcher = indexNameMatcher; + final String value = settings.get(INDEX_TRANSLOG_DURABILITY, Translog.Durability.REQUEST.name()); + this.durability = getFromSettings(settings, Translog.Durability.REQUEST); + syncInterval = settings.getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5)); + assert indexNameMatcher.test(indexMetaData.getIndex()); } @@ -295,6 +308,11 @@ public final class IndexSettings { logger.warn("failed to refresh index settings for [{}]", e, mergedSettings); } } + try { + updateSettings(mergedSettings); + } catch (Exception e) { + logger.warn("failed to refresh index settings for [{}]", e, mergedSettings); + } return true; } @@ -304,4 +322,34 @@ public final class IndexSettings { List> getUpdateListeners() { // for testing return updateListeners; } + + /** + * Returns the translog durability for this index. + */ + public Translog.Durability getTranslogDurability() { + return durability; + } + + private Translog.Durability getFromSettings(Settings settings, Translog.Durability defaultValue) { + final String value = settings.get(INDEX_TRANSLOG_DURABILITY, defaultValue.name()); + try { + return Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)); + } catch (IllegalArgumentException ex) { + logger.warn("Can't apply {} illegal value: {} using {} instead, use one of: {}", INDEX_TRANSLOG_DURABILITY, value, defaultValue, Arrays.toString(Translog.Durability.values())); + return defaultValue; + } + } + + private void updateSettings(Settings settings) { + final Translog.Durability durability = getFromSettings(settings, this.durability); + if (durability != this.durability) { + logger.info("updating durability from [{}] to [{}]", this.durability, durability); + this.durability = durability; + } + } + + public TimeValue getTranslogSyncInterval() { + return syncInterval; + } + } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java index a2c65c6441d..43c9af672d1 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -69,7 +69,14 @@ import java.io.IOException; import java.io.Reader; import java.nio.charset.StandardCharsets; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import static java.util.Collections.unmodifiableMap; diff --git 
a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 86c06dbe54f..c833f41457e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -29,7 +29,12 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory; import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory; -import org.elasticsearch.indices.analysis.*; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.indices.analysis.HunspellService; +import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; +import org.elasticsearch.indices.analysis.PreBuiltCharFilters; +import org.elasticsearch.indices.analysis.PreBuiltTokenFilters; +import org.elasticsearch.indices.analysis.PreBuiltTokenizers; import java.io.Closeable; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index 261add4e220..a042bbcb9f3 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -21,16 +21,13 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.core.StringFieldMapper; import java.io.Closeable; -import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -81,7 +78,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable * and 100 afterwards so we override the positionIncrementGap if it * doesn't match here. 
*/ - int overridePositionIncrementGap = StringFieldMapper.Defaults.positionIncrementGap(indexSettings.getIndexVersionCreated()); + int overridePositionIncrementGap = StringFieldMapper.Defaults.POSITION_INCREMENT_GAP; if (analyzerFactory instanceof CustomAnalyzerProvider) { ((CustomAnalyzerProvider) analyzerFactory).build(this); /* diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java index 047e278f953..3c4768279f2 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java @@ -74,7 +74,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider analyzers; - private final Analyzer defaultAnalyzer; + private final Map analyzers; - public FieldNameAnalyzer(Analyzer defaultAnalyzer) { - this(new CopyOnWriteHashMap<>(), defaultAnalyzer); - } - - public FieldNameAnalyzer(Map analyzers, Analyzer defaultAnalyzer) { + public FieldNameAnalyzer(Map analyzers) { super(Analyzer.PER_FIELD_REUSE_STRATEGY); this.analyzers = CopyOnWriteHashMap.copyOf(analyzers); - this.defaultAnalyzer = defaultAnalyzer; } public Map analyzers() { return analyzers; } - public Analyzer defaultAnalyzer() { - return defaultAnalyzer; - } - @Override protected Analyzer getWrappedAnalyzer(String fieldName) { Analyzer analyzer = analyzers.get(fieldName); @@ -64,18 +51,4 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper { // Fields need to be explicitly added throw new IllegalArgumentException("Field [" + fieldName + "] has no associated analyzer"); } - - /** - * Return a new instance that contains the union of this and of the provided analyzers. 
- */ - public FieldNameAnalyzer copyAndAddAll(Stream> mappers) { - CopyOnWriteHashMap result = analyzers.copyAndPutAll(mappers.map((e) -> { - if (e.getValue() == null) { - return new AbstractMap.SimpleImmutableEntry<>(e.getKey(), defaultAnalyzer); - } - return e; - })); - return new FieldNameAnalyzer(result, defaultAnalyzer); - } - } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java index db8ccfec611..a012db3b785 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeepTypesFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeepTypesFilterFactory.java index 4a2a97e8892..1f44657a897 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeepTypesFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeepTypesFilterFactory.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.TypeTokenFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.Arrays; @@ -32,7 +32,7 @@ import java.util.Set; /** * A {@link TokenFilterFactory} for {@link TypeTokenFilter}. This filter only * keep tokens that are contained in the set configured via - * {@value #KEEP_TYPES_KEY} setting. + * {@value #KEEP_TYPES_KEY} setting. *
<p> * Configuration options: * <ul>
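Editorial aside on the FieldNameAnalyzer change a few hunks up (illustration only, not part of the patch): with the defaultAnalyzer fallback removed, a field with no registered analyzer now fails fast instead of being analyzed with a silently chosen default. A compact sketch of the resulting lookup, mirroring the Lucene DelegatingAnalyzerWrapper usage shown in that hunk; the class name here is hypothetical.

```java
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;

import java.util.Map;

// Per-field analyzer dispatch without a default: unknown fields throw, which
// surfaces mapping bugs at analysis time rather than hiding them.
final class StrictFieldNameAnalyzer extends DelegatingAnalyzerWrapper {
    private final Map<String, Analyzer> analyzers;

    StrictFieldNameAnalyzer(Map<String, Analyzer> analyzers) {
        super(Analyzer.PER_FIELD_REUSE_STRATEGY);
        this.analyzers = analyzers;
    }

    @Override
    protected Analyzer getWrappedAnalyzer(String fieldName) {
        Analyzer analyzer = analyzers.get(fieldName);
        if (analyzer != null) {
            return analyzer;
        }
        // fields must be registered explicitly, as in the patched FieldNameAnalyzer
        throw new IllegalArgumentException("Field [" + fieldName + "] has no associated analyzer");
    }
}
```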
    diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java index 56a62624af9..82b8df70741 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java @@ -24,8 +24,8 @@ import org.apache.lucene.analysis.miscellaneous.KeepWordFilter; import org.apache.lucene.analysis.miscellaneous.Lucene43KeepWordFilter; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; /** diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java index ebd16cbe2e7..b787ed64090 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java @@ -22,8 +22,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter; import org.apache.lucene.analysis.util.CharArraySet; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java index 7262b0ad9da..d926371ca48 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.NumericTokenStream; import java.io.IOException; -import java.io.Reader; /** * @@ -37,4 +36,4 @@ public class NumericLongTokenizer extends NumericTokenizer { protected void setValue(NumericTokenStream tokenStream, String value) { tokenStream.setLongValue(Long.parseLong(value)); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java index 43378411ae4..d5da62f67b1 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java @@ -20,8 +20,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.pattern.PatternTokenizer; @@ -53,4 +53,4 @@ public final class PatternAnalyzer extends Analyzer { } return new TokenStreamComponents(tokenizer, stream); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternCaptureGroupTokenFilterFactory.java 
b/core/src/main/java/org/elasticsearch/index/analysis/PatternCaptureGroupTokenFilterFactory.java index d2d6aaeaf59..006fb447368 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternCaptureGroupTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PatternCaptureGroupTokenFilterFactory.java @@ -22,8 +22,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.pattern.PatternCaptureGroupTokenFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.regex.Pattern; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceTokenFilterFactory.java index b2441d547cf..a6d22b2be95 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceTokenFilterFactory.java @@ -21,9 +21,9 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.pattern.PatternReplaceFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.regex.Pattern; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java index 4ce0bee7a2c..996cc93cd20 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java @@ -22,14 +22,14 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.en.EnglishPossessiveFilter; -import org.apache.lucene.analysis.tr.TurkishLowerCaseFilter; import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.StopFilter; +import org.apache.lucene.analysis.en.EnglishPossessiveFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; -import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.analysis.standard.StandardFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.analysis.standard.std40.StandardTokenizer40; +import org.apache.lucene.analysis.tr.TurkishLowerCaseFilter; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; @@ -40,7 +40,7 @@ import org.apache.lucene.util.Version; * stemmer is the part of the class name before "Stemmer", e.g., the stemmer in * {@link org.tartarus.snowball.ext.EnglishStemmer} is named "English". - * @deprecated (3.1) Use the language-specific analyzer in modules/analysis instead. + * @deprecated (3.1) Use the language-specific analyzer in modules/analysis instead. 
* This analyzer WAS removed in Lucene 5.0 */ @Deprecated diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardAnalyzerProvider.java index 3c79abd6fb6..c0d527f1b11 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardAnalyzerProvider.java @@ -23,8 +23,8 @@ import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.util.CharArraySet; import org.elasticsearch.Version; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; /** diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java index a6cfe91cbf4..156ad1ff07e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.StopFilter; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StemmerOverrideTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StemmerOverrideTokenFilterFactory.java index 11f6a28ec82..66643cc2396 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StemmerOverrideTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StemmerOverrideTokenFilterFactory.java @@ -23,8 +23,8 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilter; import org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilter.StemmerOverrideMap; import org.elasticsearch.common.Strings; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.io.IOException; @@ -41,7 +41,7 @@ public class StemmerOverrideTokenFilterFactory extends AbstractTokenFilterFactor if (rules == null) { throw new IllegalArgumentException("stemmer override filter requires either `rules` or `rules_path` to be configured"); } - + StemmerOverrideFilter.Builder builder = new StemmerOverrideFilter.Builder(false); parseRules(rules, builder, "=>"); overrideMap = builder.build(); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java index 7f8b65676bf..1154f9b0f79 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java @@ -57,7 +57,30 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; -import org.tartarus.snowball.ext.*; +import 
org.tartarus.snowball.ext.ArmenianStemmer; +import org.tartarus.snowball.ext.BasqueStemmer; +import org.tartarus.snowball.ext.CatalanStemmer; +import org.tartarus.snowball.ext.DanishStemmer; +import org.tartarus.snowball.ext.DutchStemmer; +import org.tartarus.snowball.ext.EnglishStemmer; +import org.tartarus.snowball.ext.FinnishStemmer; +import org.tartarus.snowball.ext.FrenchStemmer; +import org.tartarus.snowball.ext.German2Stemmer; +import org.tartarus.snowball.ext.GermanStemmer; +import org.tartarus.snowball.ext.HungarianStemmer; +import org.tartarus.snowball.ext.IrishStemmer; +import org.tartarus.snowball.ext.ItalianStemmer; +import org.tartarus.snowball.ext.KpStemmer; +import org.tartarus.snowball.ext.LithuanianStemmer; +import org.tartarus.snowball.ext.LovinsStemmer; +import org.tartarus.snowball.ext.NorwegianStemmer; +import org.tartarus.snowball.ext.PorterStemmer; +import org.tartarus.snowball.ext.PortugueseStemmer; +import org.tartarus.snowball.ext.RomanianStemmer; +import org.tartarus.snowball.ext.RussianStemmer; +import org.tartarus.snowball.ext.SpanishStemmer; +import org.tartarus.snowball.ext.SwedishStemmer; +import org.tartarus.snowball.ext.TurkishStemmer; /** */ @@ -138,7 +161,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { return new FrenchLightStemFilter(tokenStream); } else if ("minimal_french".equalsIgnoreCase(language) || "minimalFrench".equalsIgnoreCase(language)) { return new FrenchMinimalStemFilter(tokenStream); - + // Galician stemmers } else if ("galician".equalsIgnoreCase(language)) { return new GalicianStemFilter(tokenStream); @@ -168,7 +191,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { } else if ("indonesian".equalsIgnoreCase(language)) { return new IndonesianStemFilter(tokenStream); - + // Irish stemmer } else if ("irish".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new IrishStemmer()); @@ -192,8 +215,8 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { return new NorwegianLightStemFilter(tokenStream); } else if ("minimal_norwegian".equalsIgnoreCase(language) || "minimalNorwegian".equals(language)) { return new NorwegianMinimalStemFilter(tokenStream); - - // Norwegian (Nynorsk) stemmers + + // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { return new NorwegianLightStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { @@ -223,7 +246,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { return new SnowballFilter(tokenStream, new SpanishStemmer()); } else if ("light_spanish".equalsIgnoreCase(language) || "lightSpanish".equalsIgnoreCase(language)) { return new SpanishLightStemFilter(tokenStream); - + // Sorani Kurdish stemmer } else if ("sorani".equalsIgnoreCase(language)) { return new SoraniStemFilter(tokenStream); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index 1f58c8a3d71..3696462c4f5 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -28,9 +28,9 @@ import org.apache.lucene.analysis.synonym.SolrSynonymParser; import 
org.apache.lucene.analysis.synonym.SynonymFilter; import org.apache.lucene.analysis.synonym.SynonymMap; import org.apache.lucene.analysis.synonym.WordnetSynonymParser; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java index c93bfa34190..f81ac97ba90 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; -import java.io.Reader; - /** * */ diff --git a/core/src/main/java/org/elasticsearch/index/analysis/TruncateTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/TruncateTokenFilterFactory.java index 84359268644..0a5a30cc28f 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/TruncateTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/TruncateTokenFilterFactory.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; /** diff --git a/core/src/main/java/org/elasticsearch/index/analysis/UniqueTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/UniqueTokenFilterFactory.java index f506a6eaab0..eec70134c3f 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/UniqueTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/UniqueTokenFilterFactory.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.UniqueTokenFilter; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; /** diff --git a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java index ffb283e7a23..1d5a9563130 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java @@ -25,17 +25,28 @@ import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; -import java.util.*; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; import 
java.util.regex.Matcher; import java.util.regex.Pattern; -import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.*; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_ALL; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_NUMBERS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_WORDS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_NUMBER_PARTS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_WORD_PARTS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.PRESERVE_ORIGINAL; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_CASE_CHANGE; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_NUMERICS; +import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE; public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory { @@ -195,4 +206,4 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory } return new String(out, 0, writePos); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java index d0388205b1b..8d65e008f25 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java @@ -23,8 +23,8 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.compound.DictionaryCompoundWordTokenFilter; import org.apache.lucene.analysis.compound.Lucene43DictionaryCompoundWordTokenFilter; import org.apache.lucene.util.Version; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -42,11 +42,11 @@ public class DictionaryCompoundWordTokenFilterFactory extends AbstractCompoundWo @Override public TokenStream create(TokenStream tokenStream) { if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, + return new DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch); } else { return new Lucene43DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch); } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java index 841ca7966d9..42a29784acc 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java @@ -24,8 +24,8 @@ import org.apache.lucene.analysis.compound.HyphenationCompoundWordTokenFilter; import org.apache.lucene.analysis.compound.Lucene43HyphenationCompoundWordTokenFilter; import 
org.apache.lucene.analysis.compound.hyphenation.HyphenationTree; import org.apache.lucene.util.Version; -import org.elasticsearch.env.Environment; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.xml.sax.InputSource; @@ -61,10 +61,10 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW @Override public TokenStream create(TokenStream tokenStream) { if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, + return new HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch); } else { - return new Lucene43HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, + return new Lucene43HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch); } } diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index ceac3ca15c5..4e9ecf569d0 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.Accountable; @@ -127,12 +128,12 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L final IndexSearcher searcher = new IndexSearcher(topLevelContext); searcher.setQueryCache(null); final Weight weight = searcher.createNormalizedWeight(query, false); - final DocIdSetIterator it = weight.scorer(context); + Scorer s = weight.scorer(context); final BitSet bitSet; - if (it == null) { + if (s == null) { bitSet = null; } else { - bitSet = BitSet.of(it, context.reader().maxDoc()); + bitSet = BitSet.of(s.iterator(), context.reader().maxDoc()); } Value value = new Value(bitSet, shardId); diff --git a/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java b/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java index 947968deab0..62b8d3ba138 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java +++ b/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.cache.query; +import org.apache.lucene.search.DocIdSet; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -27,8 +28,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.apache.lucene.search.DocIdSet; - import java.io.IOException; /** diff --git a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java index e4d86be1bda..432f81da8a9 100644 --- 
diff --git a/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java b/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java
index 947968deab0..62b8d3ba138 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java
@@ -19,6 +19,7 @@ package org.elasticsearch.index.cache.query;
 
+import org.apache.lucene.search.DocIdSet;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
@@ -27,8 +28,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
 
-import org.apache.lucene.search.DocIdSet;
-
 import java.io.IOException;
 
 /**
diff --git a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
index e4d86be1bda..432f81da8a9 100644
--- a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
+++ b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
@@ -26,6 +26,7 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.index.mapper.MapperService;
+
 import java.util.Map;
 
 /**
diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
index 2c23f947475..7663a322be6 100644
--- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
+++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
@@ -54,7 +54,7 @@ public class PerFieldMappingPostingFormatCodec extends Lucene54Codec {
 
     @Override
     public PostingsFormat getPostingsFormatForField(String field) {
-        final MappedFieldType indexName = mapperService.indexName(field);
+        final MappedFieldType indexName = mapperService.fullName(field);
         if (indexName == null) {
             logger.warn("no index mapper found for field: [{}] returning default postings format", field);
         } else if (indexName instanceof CompletionFieldMapper.CompletionFieldType) {
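The PerFieldMappingPostingFormatCodec hunk above only swaps the mapper lookup from indexName(field) to fullName(field); the surrounding pattern is Lucene's per-field codec hook, which lets a codec route individual fields to different postings formats. A hedged sketch of that hook, where the field name and the forName target are illustrative assumptions rather than what Elasticsearch ships:

import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene54.Lucene54Codec;

public class PerFieldSketchCodec extends Lucene54Codec {
    @Override
    public PostingsFormat getPostingsFormatForField(String field) {
        // Hypothetical routing: one known field gets a non-default format.
        if ("my_special_field".equals(field)) {
            return PostingsFormat.forName("Lucene50"); // stand-in format name
        }
        return super.getPostingsFormatForField(field);
    }
}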
diff --git a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
index 32ffbf371f4..4278eedcac0 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
@@ -19,7 +19,11 @@ package org.elasticsearch.index.engine;
 
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.ConcurrentMergeScheduler;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.MergeScheduler;
+import org.apache.lucene.index.OneMergeHelper;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.metrics.CounterMetric;
@@ -144,7 +148,7 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler {
 
     @Override
     public MergeScheduler clone() {
-        // Lucene IW makes a clone internally but since we hold on to this instance
+        // Lucene IW makes a clone internally but since we hold on to this instance
         // the clone will just be the identity.
         return this;
     }
diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java
index 212bfe309af..b096cc07d82 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -19,7 +19,18 @@ package org.elasticsearch.index.engine;
 
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.SegmentReader;
+import org.apache.lucene.index.SnapshotDeletionPolicy;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SearcherManager;
@@ -52,7 +63,12 @@ import org.elasticsearch.index.translog.Translog;
 
 import java.io.Closeable;
 import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.Condition;
diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
index beb93829d93..b5ec76e7f2c 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
@@ -52,7 +52,6 @@ public final class EngineConfig {
     private final TranslogRecoveryPerformer translogRecoveryPerformer;
     private final IndexSettings indexSettings;
     private final ByteSizeValue indexingBufferSize;
-    private volatile boolean compoundOnFlush = true;
     private long gcDeletesInMillis = DEFAULT_GC_DELETES.millis();
     private volatile boolean enableGcDeletes = true;
     private final TimeValue flushMergesAfter;
@@ -72,11 +71,6 @@ public final class EngineConfig {
     private final QueryCache queryCache;
     private final QueryCachingPolicy queryCachingPolicy;
 
-    /**
-     * Index setting for compound file on flush. This setting is realtime updateable.
-     */
-    public static final String INDEX_COMPOUND_ON_FLUSH = "index.compound_on_flush";
-
     /**
      * Index setting to enable / disable deletes garbage collection.
     * This setting is realtime updateable
@@ -121,7 +115,6 @@ public final class EngineConfig {
         this.similarity = similarity;
         this.codecService = codecService;
         this.eventListener = eventListener;
-        this.compoundOnFlush = settings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, compoundOnFlush);
         codecName = settings.get(EngineConfig.INDEX_CODEC_SETTING, EngineConfig.DEFAULT_CODEC_NAME);
         // We give IndexWriter a "huge" (256 MB) buffer, so it won't flush on its own unless the ES indexing buffer is also huge and/or
         // there are not too many shards allocated to this node. Instead, IndexingMemoryController periodically checks
@@ -157,13 +150,6 @@ public final class EngineConfig {
         return indexingBufferSize;
     }
 
-    /**
-     * Returns true iff flushed segments should be written as compound file system. Defaults to true
-     */
-    public boolean isCompoundOnFlush() {
-        return compoundOnFlush;
-    }
-
     /**
      * Returns the GC deletes cycle in milliseconds.
      */
@@ -295,13 +281,6 @@ public final class EngineConfig {
         this.gcDeletesInMillis = gcDeletesInMillis;
     }
 
-    /**
-     * Sets if flushed segments should be written as compound file system. Defaults to true
-     */
-    public void setCompoundOnFlush(boolean compoundOnFlush) {
-        this.compoundOnFlush = compoundOnFlush;
-    }
-
     /**
      * Returns the {@link org.elasticsearch.index.shard.TranslogRecoveryPerformer} for this engine. This class is used
      * to apply transaction log operations to the engine. It encapsulates all the logic to transfer the translog entry into
diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java
index 3384f78433f..ac95799b3bb 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.engine;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.SearcherManager;
 import org.apache.lucene.store.AlreadyClosedException;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.index.store.Store;
 
diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcherFactory.java b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcherFactory.java
index a09ad622299..cc82262fd40 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcherFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcherFactory.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.engine;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.SearcherFactory;
-import org.elasticsearch.index.engine.EngineConfig;
 
 import java.io.IOException;
diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 39d898f115a..cdc83fa8ffc 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -19,8 +19,21 @@ package org.elasticsearch.index.engine;
 
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexFormatTooOldException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.LiveIndexWriterConfig;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.SearcherFactory;
 import org.apache.lucene.search.SearcherManager;
@@ -62,7 +75,12 @@ import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.Lock;
@@ -297,15 +315,6 @@ public class InternalEngine extends Engine {
         }
     }
 
-    private void updateIndexWriterSettings() {
-        try {
-            final LiveIndexWriterConfig iwc = indexWriter.getConfig();
-            iwc.setUseCompoundFile(engineConfig.isCompoundOnFlush());
-        } catch (AlreadyClosedException ex) {
-            // ignore
-        }
-    }
-
     @Override
     public GetResult get(Get get, Function<String, Searcher> searcherFactory) throws EngineException {
         try (ReleasableLock lock = readLock.acquire()) {
@@ -934,7 +943,7 @@ public class InternalEngine extends Engine {
              * here but with 1s poll this is only executed twice at most
              * in combination with the default writelock timeout*/
             iwc.setWriteLockTimeout(5000);
-            iwc.setUseCompoundFile(this.engineConfig.isCompoundOnFlush());
+            iwc.setUseCompoundFile(true); // always use compound on flush - reduces # of file-handles on refresh
             // Warm-up hook for newly-merged segments. Warming up segments here is better since it will be performed at the end
             // of the merge operation and won't slow down _refresh
             iwc.setMergedSegmentWarmer(new IndexReaderWarmer() {
@@ -1134,20 +1143,18 @@ public class InternalEngine extends Engine {
         @Override
         protected void handleMergeException(final Directory dir, final Throwable exc) {
             logger.error("failed to merge", exc);
-            if (config().getMergeSchedulerConfig().isNotifyOnMergeFailure()) {
-                engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {
-                    @Override
-                    public void onFailure(Throwable t) {
-                        logger.debug("merge failure action rejected", t);
-                    }
+            engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {
+                @Override
+                public void onFailure(Throwable t) {
+                    logger.debug("merge failure action rejected", t);
+                }
 
-                    @Override
-                    protected void doRun() throws Exception {
-                        MergePolicy.MergeException e = new MergePolicy.MergeException(exc, dir);
-                        failEngine("merge failed", e);
-                    }
-                });
-            }
+                @Override
+                protected void doRun() throws Exception {
+                    MergePolicy.MergeException e = new MergePolicy.MergeException(exc, dir);
+                    failEngine("merge failed", e);
+                }
+            });
         }
     }
 
@@ -1175,7 +1182,6 @@ public class InternalEngine extends Engine {
 
     public void onSettingsChanged() {
         mergeScheduler.refreshConfig();
-        updateIndexWriterSettings();
         // config().isEnableGcDeletes() or config.getGcDeletesInMillis() may have changed:
         maybePruneDeletedTombstones();
     }
diff --git a/core/src/main/java/org/elasticsearch/index/engine/RecoveryCounter.java b/core/src/main/java/org/elasticsearch/index/engine/RecoveryCounter.java
index 28401496456..31fddbedfb7 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/RecoveryCounter.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/RecoveryCounter.java
@@ -19,7 +19,6 @@ package org.elasticsearch.index.engine;
 
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.index.store.Store;
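With the InternalEngine change above, the engine no longer consults the removed index.compound_on_flush setting: newly flushed segments are always written as compound files, which keeps the number of open file handles down after a refresh. A minimal sketch of the resulting IndexWriterConfig setup; the analyzer choice is an arbitrary assumption:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;

final class CompoundOnFlushSketch {
    static IndexWriterConfig newWriterConfig() {
        IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
        // Always write flushed segments as compound files (.cfs): fewer
        // per-segment file handles at the cost of a small write overhead.
        iwc.setUseCompoundFile(true);
        return iwc;
    }
}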
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java
index 97750cf0695..e64499e8a3c 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java
@@ -19,7 +19,12 @@ package org.elasticsearch.index.fielddata;
 
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.RandomAccessOrds;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java
index 7d2689dc157..ffa23bf56e4 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java
@@ -24,6 +24,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.FieldComparatorSource;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.join.BitSetProducer;
@@ -79,7 +80,7 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
     /**
      * The field name.
      */
-    MappedFieldType.Names getFieldNames();
+    String getFieldName();
 
     /**
      * The field data type.
@@ -139,7 +140,8 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
          * Get a {@link DocIdSet} that matches the inner documents.
          */
        public DocIdSetIterator innerDocs(LeafReaderContext ctx) throws IOException {
-            return innerFilter.scorer(ctx);
+            Scorer s = innerFilter.scorer(ctx);
+            return s == null ? null : s.iterator();
         }
     }
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java
index dc0db303239..7640a9be200 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java
@@ -21,9 +21,7 @@ package org.elasticsearch.index.fielddata;
 
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.util.Accountable;
-import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.shard.ShardId;
 
 /**
@@ -50,12 +48,12 @@ public interface IndexFieldDataCache {
         /**
          * Called after the fielddata is loaded during the cache phase
          */
-        void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage);
+        void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage);
 
         /**
          * Called after the fielddata is unloaded
          */
-        void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes);
+        void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes);
     }
 
     class None implements IndexFieldDataCache {
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
index 80947260442..8ac0bda2f0b 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
@@ -25,16 +25,15 @@ import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.DisabledIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
-import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.GeoPointArrayIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.IndexIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
 import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
@@ -61,7 +60,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
     public static final String FIELDDATA_CACHE_VALUE_NODE = "node";
 
     private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> {
-        throw new IllegalStateException("Can't load fielddata on [" + fieldType.names().fullName()
+        throw new IllegalStateException("Can't load fielddata on [" + fieldType.name()
                 + "] of index [" + indexProperties.getIndex().getName() + "] because fielddata is unsupported on fields of type ["
                 + fieldType.fieldDataType().getType() + "]. Use doc values instead.");
     };
@@ -148,11 +147,11 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
     private final MapperService mapperService;
     private static final IndexFieldDataCache.Listener DEFAULT_NOOP_LISTENER = new IndexFieldDataCache.Listener() {
         @Override
-        public void onCache(ShardId shardId, Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
+        public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
         }
 
         @Override
-        public void onRemoval(ShardId shardId, Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+        public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
         }
     };
     private volatile IndexFieldDataCache.Listener listener = DEFAULT_NOOP_LISTENER;
@@ -195,22 +194,22 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
 
     @SuppressWarnings("unchecked")
     public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
-        final Names fieldNames = fieldType.names();
+        final String fieldName = fieldType.name();
         final FieldDataType type = fieldType.fieldDataType();
         if (type == null) {
-            throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]");
+            throw new IllegalArgumentException("found no fielddata type for field [" + fieldName + "]");
         }
         final boolean docValues = fieldType.hasDocValues();
         IndexFieldData.Builder builder = null;
         String format = type.getFormat(indexSettings.getSettings());
         if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) {
-            logger.warn("field [" + fieldNames.fullName() + "] has no doc values, will use default field data format");
+            logger.warn("field [" + fieldName + "] has no doc values, will use default field data format");
             format = null;
         }
         if (format != null) {
             builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format));
             if (builder == null) {
-                logger.warn("failed to find format [" + format + "] for field [" + fieldNames.fullName() + "], will use default");
+                logger.warn("failed to find format [" + format + "] for field [" + fieldName + "], will use default");
             }
         }
         if (builder == null && docValues) {
@@ -220,24 +219,24 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
             builder = buildersByType.get(type.getType());
         }
         if (builder == null) {
-            throw new IllegalArgumentException("failed to find field data builder for field " + fieldNames.fullName() + ", and type " + type.getType());
+            throw new IllegalArgumentException("failed to find field data builder for field " + fieldName + ", and type " + type.getType());
         }
 
         IndexFieldDataCache cache;
         synchronized (this) {
-            cache = fieldDataCaches.get(fieldNames.indexName());
+            cache = fieldDataCaches.get(fieldName);
             if (cache == null) {
                 // we default to node level cache, which in turn defaults to be unbounded
                 // this means changing the node level settings is simple, just set the bounds there
                 String cacheType = type.getSettings().get("cache", indexSettings.getSettings().get(FIELDDATA_CACHE_KEY, FIELDDATA_CACHE_VALUE_NODE));
                 if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) {
-                    cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldNames, type);
+                    cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName, type);
                 } else if ("none".equals(cacheType)){
                     cache = new IndexFieldDataCache.None();
                 } else {
IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldNames.fullName() + "]"); + throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldName + "]"); } - fieldDataCaches.put(fieldNames.indexName(), cache); + fieldDataCaches.put(fieldName, cache); } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/RamAccountingTermsEnum.java b/core/src/main/java/org/elasticsearch/index/fielddata/RamAccountingTermsEnum.java index 15aa961294c..57b388b89c0 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/RamAccountingTermsEnum.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/RamAccountingTermsEnum.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.index.fielddata.plain.AbstractIndexFieldData; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.index.fielddata.plain.AbstractIndexFieldData; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java index e646364ef13..bb31df75348 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.Accountable; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.shard.ShardId; import java.util.Map; @@ -52,16 +51,15 @@ public class ShardFieldData implements IndexFieldDataCache.Listener { } @Override - public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { + public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) { totalMetric.inc(ramUsage.ramBytesUsed()); - String keyFieldName = fieldNames.indexName(); - CounterMetric total = perFieldTotals.get(keyFieldName); + CounterMetric total = perFieldTotals.get(fieldName); if (total != null) { total.inc(ramUsage.ramBytesUsed()); } else { total = new CounterMetric(); total.inc(ramUsage.ramBytesUsed()); - CounterMetric prev = perFieldTotals.putIfAbsent(keyFieldName, total); + CounterMetric prev = perFieldTotals.putIfAbsent(fieldName, total); if (prev != null) { prev.inc(ramUsage.ramBytesUsed()); } @@ -69,15 +67,14 @@ public class ShardFieldData implements IndexFieldDataCache.Listener { } @Override - public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { if (wasEvicted) { evictionsMetric.inc(); } if (sizeInBytes != -1) { totalMetric.dec(sizeInBytes); - String keyFieldName = fieldNames.indexName(); - CounterMetric total = perFieldTotals.get(keyFieldName); + CounterMetric total = perFieldTotals.get(fieldName); if (total != null) { total.dec(sizeInBytes); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java 
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java
index 157c6fda97c..c35d59fc692 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java
@@ -19,7 +19,12 @@ package org.elasticsearch.index.fielddata;
 
-import org.apache.lucene.util.*;
+import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.InPlaceMergeSorter;
+import org.apache.lucene.util.RamUsageEstimator;
+import org.apache.lucene.util.Sorter;
 
 import java.util.Arrays;
 
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
index 1789c3537e6..51f8f2b42bd 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
@@ -19,16 +19,16 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
 
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.RandomAccessOrds;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.SortField;
-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BitSet;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
@@ -80,13 +80,13 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
 
     @Override
     public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
-        assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName());
+        assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
 
         final boolean sortMissingLast = sortMissingLast(missingValue) ^ reversed;
         final BytesRef missingBytes = (BytesRef) missingObject(missingValue, reversed);
         if (indexFieldData instanceof IndexOrdinalsFieldData) {
             return new FieldComparator.TermOrdValComparator(numHits, null, sortMissingLast) {
-                
+
                 @Override
                 protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException {
                     final RandomAccessOrds values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues();
@@ -104,7 +104,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
                         return new ReplaceMissing(selectedValues, missingBytes);
                     }
                 }
-                
+
                 @Override
                 public void setScorer(Scorer scorer) {
                     BytesRefFieldComparatorSource.this.setScorer(scorer);
@@ -148,10 +148,10 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
         };
     }
 
-    
-    /**
-     * A view of a SortedDocValues where missing values
-     * are replaced with the specified term
+
+    /**
+     * A view of a SortedDocValues where missing values
+     * are replaced with the specified term
      */
     // TODO: move this out if we need it for other reasons
    static class ReplaceMissing extends SortedDocValues {
@@ -159,7 +159,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
         final int substituteOrd;
         final BytesRef substituteTerm;
         final boolean exists;
-        
+
         ReplaceMissing(SortedDocValues in, BytesRef term) {
             this.in = in;
             this.substituteTerm = term;
@@ -204,7 +204,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
             return in.lookupOrd(ord);
         }
     }
-    
+
     // we let termsenum etc fall back to the default implementation
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java
index 5391345e793..4684399a23d 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java
@@ -65,7 +65,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
 
     @Override
     public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
-        assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName());
+        assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
 
         final double dMissingValue = (Double) missingObject(missingValue, reversed);
         // NOTE: it's important to pass null as a missing value in the constructor so that
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java
index 15628513e80..ba9b031cede 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java
@@ -57,7 +57,7 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator
 
     @Override
     public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
-        assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName());
+        assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
 
         final float dMissingValue = (Float) missingObject(missingValue, reversed);
         // NOTE: it's important to pass null as a missing value in the constructor so that
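For context on the ReplaceMissing view touched above: it presents a SortedDocValues in which documents without a value report a substitute term, so comparators never have to special-case missing ords. A simplified sketch under the assumption that the substitute term already exists in the dictionary; the real class also renumbers ords when it has to insert a brand-new term:

import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.BytesRef;

final class SubstituteMissingSketch extends SortedDocValues {
    private final SortedDocValues in;
    private final int substituteOrd;

    SubstituteMissingSketch(SortedDocValues in, BytesRef substitute) {
        this.in = in;
        this.substituteOrd = in.lookupTerm(substitute); // assumed >= 0 in this sketch
    }

    @Override
    public int getOrd(int docID) {
        int ord = in.getOrd(docID);
        // ord == -1 means the document has no value: report the substitute instead.
        return ord == -1 ? substituteOrd : ord;
    }

    @Override
    public BytesRef lookupOrd(int ord) {
        return in.lookupOrd(ord);
    }

    @Override
    public int getValueCount() {
        return in.getValueCount();
    }
}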
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
index 15961ffedce..b2fd25e5445 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
@@ -21,7 +21,6 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.SortField;
@@ -57,7 +56,7 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS
 
     @Override
     public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
-        assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName());
+        assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
 
         final Long dMissingValue = (Long) missingObject(missingValue, reversed);
         // NOTE: it's important to pass null as a missing value in the constructor so that
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java
index e6f1d24f1e3..dc5041d24ef 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java
@@ -27,10 +27,10 @@ import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.packed.PackedInts;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.fielddata.*;
+import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
+import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
 import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 
@@ -64,12 +64,12 @@ public enum GlobalOrdinalsBuilder {
         if (logger.isDebugEnabled()) {
             logger.debug(
                     "Global-ordinals[{}][{}] took {} ms",
-                    indexFieldData.getFieldNames().fullName(),
+                    indexFieldData.getFieldName(),
                     ordinalMap.getValueCount(),
                     TimeValue.nsecToMSec(System.nanoTime() - startTimeNS)
             );
         }
-        return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldNames(),
+        return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldName(),
                 indexFieldData.getFieldDataType(), atomicFD, ordinalMap, memorySizeInBytes
         );
     }
@@ -103,7 +103,7 @@ public enum GlobalOrdinalsBuilder {
             subs[i] = atomicFD[i].getOrdinalsValues();
         }
         final OrdinalMap ordinalMap = OrdinalMap.build(null, subs, PackedInts.DEFAULT);
-        return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldNames(),
+        return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldName(),
                 indexFieldData.getFieldDataType(), atomicFD, ordinalMap, 0
         );
     }
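The GlobalOrdinalsBuilder hunks above keep the existing construction logic and only swap the field-name type: global ordinals are still produced by merging each segment's ordinals through Lucene's OrdinalMap, exactly as the unchanged OrdinalMap.build call shows. A minimal sketch of that step in isolation; the class and method names are illustrative:

import java.io.IOException;

import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.packed.PackedInts;

final class GlobalOrdinalsSketch {
    static OrdinalMap buildGlobalOrds(SortedSetDocValues[] perSegment) throws IOException {
        // One SortedSetDocValues per segment; the map unifies their per-segment
        // term ordinals into a single global ord space for the whole reader.
        return OrdinalMap.build(null, perSegment, PackedInts.DEFAULT);
    }
}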
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java
index 4a8bd78bb4e..5e1a2b57401 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java
@@ -40,13 +40,13 @@ import java.util.Collections;
  */
 public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexOrdinalsFieldData, Accountable {
 
-    private final MappedFieldType.Names fieldNames;
+    private final String fieldName;
     private final FieldDataType fieldDataType;
     private final long memorySizeInBytes;
 
-    protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) {
+    protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, long memorySizeInBytes) {
         super(indexSettings);
-        this.fieldNames = fieldNames;
+        this.fieldName = fieldName;
         this.fieldDataType = fieldDataType;
         this.memorySizeInBytes = memorySizeInBytes;
     }
@@ -67,8 +67,8 @@ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponen
     }
 
     @Override
-    public MappedFieldType.Names getFieldNames() {
-        return fieldNames;
+    public String getFieldName() {
+        return fieldName;
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java
index fc1b6db9758..297c8b0f30c 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java
@@ -37,8 +37,8 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
 
     private final Atomic[] atomicReaders;
 
-    InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) {
-        super(indexSettings, fieldNames, fieldDataType, memorySizeInBytes);
+    InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) {
+        super(indexSettings, fieldName, fieldDataType, memorySizeInBytes);
         this.atomicReaders = new Atomic[segmentAfd.length];
         for (int i = 0; i < segmentAfd.length; i++) {
             atomicReaders[i] = new Atomic(segmentAfd[i], ordinalMap, i);
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java
index 3b66adfee9a..d17a9fd07f1 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java
@@ -23,7 +23,13 @@ import org.apache.lucene.index.FilteredTermsEnum;
 import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.util.*;
+import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.BitSet;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefIterator;
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.LongsRef;
+import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.packed.GrowableWriter;
 import org.apache.lucene.util.packed.PackedInts;
 import org.apache.lucene.util.packed.PagedGrowableWriter;
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java
index 0b86b17f211..3d4b6536b6c 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java
@@ -19,20 +19,19 @@ package org.elasticsearch.index.fielddata.plain;
 
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.search.MultiValueMode;
@@ -41,8 +40,8 @@ import java.io.IOException;
 
 public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData {
 
-    AbstractGeoPointDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
-        super(index, fieldNames, fieldDataType);
+    AbstractGeoPointDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
+        super(index, fieldName, fieldDataType);
     }
 
     @Override
@@ -56,8 +55,8 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
     public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData {
         final boolean indexCreatedBefore2x;
 
-        public GeoPointDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType, final boolean indexCreatedBefore2x) {
-            super(index, fieldNames, fieldDataType);
+        public GeoPointDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType, final boolean indexCreatedBefore2x) {
+            super(index, fieldName, fieldDataType);
             this.indexCreatedBefore2x = indexCreatedBefore2x;
         }
 
@@ -65,9 +64,9 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
         public AtomicGeoPointFieldData load(LeafReaderContext context) {
             try {
                 if (indexCreatedBefore2x) {
-                    return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldNames.indexName()));
+                    return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldName));
                 }
-                return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldNames.indexName()));
+                return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldName));
             } catch (IOException e) {
                 throw new IllegalStateException("Cannot load doc values", e);
             }
@@ -84,8 +83,8 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
         public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
             // Ignore breaker
-            return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.names(), fieldType.fieldDataType(),
+            return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name(), fieldType.fieldDataType(),
                 indexSettings.getIndexVersionCreated().before(Version.V_2_2_0));
         }
     }
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java
index 8f0f2798c05..151ee92058d 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java
@@ -26,8 +26,11 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.fielddata.*;
-import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.fielddata.AtomicFieldData;
+import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldDataCache;
+import org.elasticsearch.index.fielddata.RamAccountingTermsEnum;
 
 import java.io.IOException;
 
@@ -35,20 +38,20 @@ import java.io.IOException;
  */
 public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends AbstractIndexComponent implements IndexFieldData<FD> {
 
-    private final MappedFieldType.Names fieldNames;
+    private final String fieldName;
     protected final FieldDataType fieldDataType;
     protected final IndexFieldDataCache cache;
 
-    public AbstractIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
+    public AbstractIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) {
         super(indexSettings);
-        this.fieldNames = fieldNames;
+        this.fieldName = fieldName;
         this.fieldDataType = fieldDataType;
         this.cache = cache;
     }
 
     @Override
-    public MappedFieldType.Names getFieldNames() {
-        return this.fieldNames;
+    public String getFieldName() {
+        return this.fieldName;
     }
 
     @Override
@@ -58,12 +61,12 @@ public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends
 
     @Override
     public void clear() {
-        cache.clear(fieldNames.indexName());
+        cache.clear(fieldName);
     }
 
     @Override
     public FD load(LeafReaderContext context) {
-        if (context.reader().getFieldInfos().fieldInfo(fieldNames.indexName()) == null) {
+        if (context.reader().getFieldInfos().fieldInfo(fieldName) == null) {
             // Some leaf readers may be wrapped and report different set of fields and use the same cache key.
             // If a field can't be found then it doesn't mean it isn't there,
             // so if a field doesn't exist then we don't cache it and just return an empty field data instance.
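The load() guard above is worth calling out: a wrapped leaf reader can share a cache key while exposing a different set of fields, so fielddata checks FieldInfos before caching and falls back to an empty instance when the field is absent. A one-method sketch of that existence check; the class name is illustrative:

import org.apache.lucene.index.LeafReaderContext;

final class FieldPresenceSketch {
    static boolean segmentHasField(LeafReaderContext context, String fieldName) {
        // FieldInfos lists the fields actually present in this (possibly
        // wrapped) segment reader; null means "not here", not "doesn't exist".
        return context.reader().getFieldInfos().fieldInfo(fieldName) != null;
    }
}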
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java
index 3b1629f7882..a8114c41f9b 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java
@@ -26,9 +26,11 @@ import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.fielddata.*;
+import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
+import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
+import org.elasticsearch.index.fielddata.IndexFieldDataCache;
+import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.search.MultiValueMode;
 
 import java.io.IOException;
@@ -89,8 +91,8 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData
 groups = fieldDataType.getSettings().getGroups("filter");
         frequency = groups.get("frequency");
         regex = groups.get("regex");
@@ -66,7 +73,7 @@ public abstract class AbstractIndexOrdinalsFieldData extends AbstractIndexFieldD
         }
         boolean fieldFound = false;
         for (LeafReaderContext context : indexReader.leaves()) {
-            if (context.reader().getFieldInfos().fieldInfo(getFieldNames().indexName()) != null) {
+            if (context.reader().getFieldInfos().fieldInfo(getFieldName()) != null) {
                 fieldFound = true;
                 break;
             }
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java
index 2e03b74a41f..c2a50942566 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java
@@ -25,18 +25,17 @@ import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.search.MultiValueMode;
 
 public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData {
 
-    public BinaryDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
-        super(index, fieldNames, fieldDataType);
+    public BinaryDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
+        super(index, fieldName, fieldDataType);
     }
 
     @Override
     public BinaryDVAtomicFieldData load(LeafReaderContext context) {
-        return new BinaryDVAtomicFieldData(context.reader(), fieldNames.indexName());
+        return new BinaryDVAtomicFieldData(context.reader(), fieldName);
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java
index efe8bc97a30..988ecd61d65 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java
@@ -19,8 +19,8 @@ package org.elasticsearch.index.fielddata.plain;
 
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
@@ -29,7 +29,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.search.MultiValueMode;
@@ -38,8 +37,8 @@ import java.io.IOException;
 
 public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData {
 
-    public BytesBinaryDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
-        super(index, fieldNames, fieldDataType);
+    public BytesBinaryDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
+        super(index, fieldName, fieldDataType);
     }
 
     @Override
@@ -50,7 +49,7 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
     @Override
     public BytesBinaryDVAtomicFieldData load(LeafReaderContext context) {
         try {
-            return new BytesBinaryDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldNames.indexName()));
+            return new BytesBinaryDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldName));
         } catch (IOException e) {
             throw new IllegalStateException("Cannot load doc values", e);
         }
@@ -67,8 +66,8 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
         public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
             // Ignore breaker
-            final Names fieldNames = fieldType.names();
-            return new BytesBinaryDVIndexFieldData(indexSettings.getIndex(), fieldNames, fieldType.fieldDataType());
+            final String fieldName = fieldType.name();
+            return new BytesBinaryDVIndexFieldData(indexSettings.getIndex(), fieldName, fieldType.fieldDataType());
         }
     }
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java
index 58a195057bb..86daaf1a252 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java
@@ -21,13 +21,15 @@ package org.elasticsearch.index.fielddata.plain;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.fielddata.*;
+import org.elasticsearch.index.fielddata.AtomicFieldData;
+import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
+import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
+import org.elasticsearch.search.MultiValueMode;
 
 /**
  * A field data implementation that forbids loading and will throw an {@link IllegalStateException} if you try to load
@@ -40,12 +42,12 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData
 build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
             // Ignore Circuit Breaker
-            return new DisabledIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache);
+            return new DisabledIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache);
         }
     }
 
-    public DisabledIndexFieldData(IndexSettings indexSettings, Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
-        super(indexSettings, fieldNames, fieldDataType, cache);
+    public DisabledIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) {
+        super(indexSettings, fieldName, fieldDataType, cache);
     }
 
     @Override
@@ -64,7 +66,7 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData
 build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
         // Ignore Circuit Breaker
-        final Names fieldNames = fieldType.names();
+        final String fieldName = fieldType.name();
         final Settings fdSettings = fieldType.fieldDataType().getSettings();
         final Map<String, Settings> filter = fdSettings.getGroups("filter");
         if (filter != null && !filter.isEmpty()) {
-            throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]");
+            throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldName + "]");
         }
-        if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
+        if (BINARY_INDEX_FIELD_NAMES.contains(fieldName)) {
             assert numericType == null;
-            return new BinaryDVIndexFieldData(indexSettings.getIndex(), fieldNames, fieldType.fieldDataType());
+            return new BinaryDVIndexFieldData(indexSettings.getIndex(), fieldName, fieldType.fieldDataType());
         } else if (numericType != null) {
-            return new SortedNumericDVIndexFieldData(indexSettings.getIndex(), fieldNames, numericType, fieldType.fieldDataType());
+            return new SortedNumericDVIndexFieldData(indexSettings.getIndex(), fieldName, numericType, fieldType.fieldDataType());
         } else {
-            return new SortedSetDVOrdinalsIndexFieldData(indexSettings, cache, fieldNames, breakerService, fieldType.fieldDataType());
+            return new SortedSetDVOrdinalsIndexFieldData(indexSettings, cache, fieldName, breakerService, fieldType.fieldDataType());
         }
     }
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java
index ce4dc2559e4..c356ff0d2a4 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java
@@ -24,7 +24,6 @@ import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Accountables;
 import org.apache.lucene.util.BitSet;
-import org.apache.lucene.util.GeoUtils;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.util.LongArray;
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java
index a0c0a55be71..495cc023e90 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java
@@ -54,17 +54,17 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
         @Override
         public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
-            return new GeoPointArrayIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache,
+            return new GeoPointArrayIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache,
                 breakerService, fieldType.fieldDataType().getSettings()
                 .getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).before(Version.V_2_2_0) ||
                 indexSettings.getIndexVersionCreated().before(Version.V_2_2_0));
         }
     }
 
-    public GeoPointArrayIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames,
+    public GeoPointArrayIndexFieldData(IndexSettings indexSettings, String fieldName,
                                        FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService,
                                        final boolean indexCreatedBefore22) {
-        super(indexSettings, fieldNames, fieldDataType, cache);
+        super(indexSettings, fieldName, fieldDataType, cache);
         this.breakerService = breakerService;
         this.indexCreatedBefore22 = indexCreatedBefore22;
     }
@@ -73,7 +73,7 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
     public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception {
         LeafReader reader = context.reader();
 
-        Terms terms = reader.terms(getFieldNames().indexName());
+        Terms terms = reader.terms(getFieldName());
         AtomicGeoPointFieldData data = null;
         // TODO: Use an actual estimator to estimate before loading.
NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA)); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java index e836f13609b..f2c4fa826f1 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java @@ -19,7 +19,11 @@ package org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexSettings; @@ -42,7 +46,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new IndexIndexFieldData(indexSettings, fieldType.names()); + return new IndexIndexFieldData(indexSettings, fieldType.name()); } } @@ -96,8 +100,8 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { private final AtomicOrdinalsFieldData atomicFieldData; - private IndexIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names names) { - super(indexSettings, names, new FieldDataType("string"), null, null); + private IndexIndexFieldData(IndexSettings indexSettings, String name) { + super(indexSettings, name, new FieldDataType("string"), null, null); atomicFieldData = new IndexAtomicFieldData(index().name()); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java index 2dcffbe12cc..ce4f5c55eac 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -20,7 +20,12 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.codecs.blocktree.FieldReader; import org.apache.lucene.codecs.blocktree.Stats; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PagedBytes; @@ -28,7 +33,12 @@ import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; +import 
org.elasticsearch.index.fielddata.RamAccountingTermsEnum; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.MappedFieldType; @@ -47,13 +57,13 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexOrdinalsFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new PagedBytesIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService); + return new PagedBytesIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache, breakerService); } } - public PagedBytesIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, + public PagedBytesIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { - super(indexSettings, fieldNames, fieldDataType, cache, breakerService); + super(indexSettings, fieldName, fieldDataType, cache, breakerService); } @Override @@ -61,8 +71,8 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { LeafReader reader = context.reader(); AtomicOrdinalsFieldData data = null; - PagedBytesEstimator estimator = new PagedBytesEstimator(context, breakerService.getBreaker(CircuitBreaker.FIELDDATA), getFieldNames().fullName()); - Terms terms = reader.terms(getFieldNames().indexName()); + PagedBytesEstimator estimator = new PagedBytesEstimator(context, breakerService.getBreaker(CircuitBreaker.FIELDDATA), getFieldName()); + Terms terms = reader.terms(getFieldName()); if (terms == null) { data = AbstractAtomicOrdinalsFieldData.empty(); estimator.afterLoad(null, data.ramBytesUsed()); @@ -157,10 +167,10 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { public long estimateStringFieldData() { try { LeafReader reader = context.reader(); - Terms terms = reader.terms(getFieldNames().indexName()); + Terms terms = reader.terms(getFieldName()); Fields fields = reader.fields(); - final Terms fieldTerms = fields.terms(getFieldNames().indexName()); + final Terms fieldTerms = fields.terms(getFieldName()); if (fieldTerms instanceof FieldReader) { final Stats stats = ((FieldReader) fieldTerms).getStats(); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java index b1393542098..14d0375ba57 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java @@ -19,8 +19,14 @@ package org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.MultiDocValues.OrdinalMap; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongValues; @@ -33,19 +39,30 @@ import 
org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicParentChildFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexParentChildFieldData; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; /** @@ -57,10 +74,10 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData parentTypes; private final CircuitBreakerService breakerService; - public ParentChildIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, + public ParentChildIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService, CircuitBreakerService breakerService) { - super(indexSettings, fieldNames, fieldDataType, cache); + super(indexSettings, fieldName, fieldDataType, cache); this.breakerService = breakerService; Set parentTypes = new HashSet<>(); for (DocumentMapper mapper : mapperService.docMappers(false)) { @@ -129,7 +146,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData - * Order of values within a document is consistent with + * Order of values within a document is consistent with * {@link Long#compareTo(Long)}. *

    - * Although the API is multi-valued, most codecs in Lucene specialize + * Although the API is multi-valued, most codecs in Lucene specialize * for the case where documents have at most one value. In this case * {@link DocValues#unwrapSingleton(SortedNumericDocValues)} will return - * the underlying single-valued NumericDocValues representation, and + * the underlying single-valued NumericDocValues representation, and * {@link DocValues#unwrapSingletonBits(SortedNumericDocValues)} will return * a Bits matching documents that have a real value (as opposed to missing). */ @@ -125,7 +134,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple return Collections.emptyList(); } } - + /** * FieldData implementation for 32-bit float values. *

    @@ -134,17 +143,17 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple * transformation is applied at both index and search: * {@code bits ^ (bits >> 31) & 0x7fffffff} *

    - * Although the API is multi-valued, most codecs in Lucene specialize + * Although the API is multi-valued, most codecs in Lucene specialize * for the case where documents have at most one value. In this case * {@link FieldData#unwrapSingleton(SortedNumericDoubleValues)} will return - * the underlying single-valued NumericDoubleValues representation, and + * the underlying single-valued NumericDoubleValues representation, and * {@link FieldData#unwrapSingletonBits(SortedNumericDoubleValues)} will return * a Bits matching documents that have a real value (as opposed to missing). */ static final class SortedNumericFloatFieldData extends AtomicDoubleFieldData { final LeafReader reader; final String field; - + SortedNumericFloatFieldData(LeafReader reader, String field) { super(0L); this.reader = reader; @@ -155,7 +164,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple public SortedNumericDoubleValues getDoubleValues() { try { SortedNumericDocValues raw = DocValues.getSortedNumeric(reader, field); - + NumericDocValues single = DocValues.unwrapSingleton(raw); if (single != null) { return FieldData.singleton(new SingleFloatValues(single), DocValues.unwrapSingletonBits(raw)); @@ -166,19 +175,19 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple throw new IllegalStateException("Cannot load doc values", e); } } - + @Override public Collection getChildResources() { return Collections.emptyList(); } } - - /** + + /** * Wraps a NumericDocValues and exposes a single 32-bit float per document. */ static final class SingleFloatValues extends NumericDoubleValues { final NumericDocValues in; - + SingleFloatValues(NumericDocValues in) { this.in = in; } @@ -188,17 +197,17 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple return NumericUtils.sortableIntToFloat((int) in.get(docID)); } } - - /** + + /** * Wraps a SortedNumericDocValues and exposes multiple 32-bit floats per document. */ static final class MultiFloatValues extends SortedNumericDoubleValues { final SortedNumericDocValues in; - + MultiFloatValues(SortedNumericDocValues in) { this.in = in; } - + @Override public void setDocument(int doc) { in.setDocument(doc); @@ -214,7 +223,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple return in.count(); } } - + /** * FieldData implementation for 64-bit double values. *

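The float javadoc above and the double javadoc below both describe Lucene's sortable-bits encoding. A short worked example for the 32-bit case, using the same NumericUtils helpers the patch already calls; the demo class itself is an illustration only:

    import org.apache.lucene.util.NumericUtils;

    // Demonstrates that the encoded ints sort exactly like the original floats
    // and that the transformation round-trips.
    final class SortableFloatDemo {
        public static void main(String[] args) {
            int negative = NumericUtils.floatToSortableInt(-1.5f);
            int zero = NumericUtils.floatToSortableInt(0.0f);
            int positive = NumericUtils.floatToSortableInt(2.5f);
            System.out.println(negative < zero && zero < positive); // true
            System.out.println(NumericUtils.sortableIntToFloat(positive)); // 2.5
        }
    }
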
    @@ -223,17 +232,17 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple * transformation is applied at both index and search: * {@code bits ^ (bits >> 63) & 0x7fffffffffffffffL} *

    - * Although the API is multi-valued, most codecs in Lucene specialize + * Although the API is multi-valued, most codecs in Lucene specialize * for the case where documents have at most one value. In this case * {@link FieldData#unwrapSingleton(SortedNumericDoubleValues)} will return - * the underlying single-valued NumericDoubleValues representation, and + * the underlying single-valued NumericDoubleValues representation, and * {@link FieldData#unwrapSingletonBits(SortedNumericDoubleValues)} will return * a Bits matching documents that have a real value (as opposed to missing). */ static final class SortedNumericDoubleFieldData extends AtomicDoubleFieldData { final LeafReader reader; final String field; - + SortedNumericDoubleFieldData(LeafReader reader, String field) { super(0L); this.reader = reader; @@ -249,7 +258,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple throw new IllegalStateException("Cannot load doc values", e); } } - + @Override public Collection getChildResources() { return Collections.emptyList(); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index fcbbe884bd4..fc4f6f15d51 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -23,11 +23,14 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; -import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -39,8 +42,8 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i private final IndexFieldDataCache cache; private final CircuitBreakerService breakerService; - public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, Names fieldNames, CircuitBreakerService breakerService, FieldDataType fieldDataType) { - super(indexSettings.getIndex(), fieldNames, fieldDataType); + public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, String fieldName, CircuitBreakerService breakerService, FieldDataType fieldDataType) { + super(indexSettings.getIndex(), fieldName, fieldDataType); this.indexSettings = indexSettings; this.cache = cache; this.breakerService = breakerService; @@ -53,7 +56,7 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i @Override public AtomicOrdinalsFieldData load(LeafReaderContext context) { - return new SortedSetDVBytesAtomicFieldData(context.reader(), 
fieldNames.indexName()); + return new SortedSetDVBytesAtomicFieldData(context.reader(), fieldName); } @Override @@ -69,7 +72,7 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i } boolean fieldFound = false; for (LeafReaderContext context : indexReader.leaves()) { - if (context.reader().getFieldInfos().fieldInfo(getFieldNames().indexName()) != null) { + if (context.reader().getFieldInfos().fieldInfo(getFieldName()) != null) { fieldFound = true; break; } diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java index 899da8f3738..9d7ed36bfd7 100644 --- a/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +++ b/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.StoredFieldVisitor; -import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -95,7 +94,7 @@ public class FieldsVisitor extends StoredFieldVisitor { } // can't derive exact mapping type for (Map.Entry> entry : fields().entrySet()) { - MappedFieldType fieldType = mapperService.indexName(entry.getKey()); + MappedFieldType fieldType = mapperService.fullName(entry.getKey()); if (fieldType == null) { continue; } @@ -113,7 +112,7 @@ public class FieldsVisitor extends StoredFieldVisitor { if (fieldMapper == null) { // it's possible index name doesn't match field name (legacy feature) for (FieldMapper mapper : documentMapper.mappers()) { - if (mapper.fieldType().names().indexName().equals(indexName)) { + if (mapper.fieldType().name().equals(indexName)) { fieldMapper = mapper; break; } diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java b/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java index 3d5a01c41d1..a9880d59f65 100644 --- a/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/core/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.internal.IdFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; @@ -70,7 +69,7 @@ public class SingleFieldsVisitor extends FieldsVisitor { if (fieldsValues == null) { return; } - List fieldValues = fieldsValues.get(fieldType.names().indexName()); + List fieldValues = fieldsValues.get(fieldType.name()); if (fieldValues == null) { return; } diff --git a/core/src/main/java/org/elasticsearch/index/indexing/IndexingStats.java b/core/src/main/java/org/elasticsearch/index/indexing/IndexingStats.java index 3df62994f96..07ca8af17e3 100644 --- a/core/src/main/java/org/elasticsearch/index/indexing/IndexingStats.java +++ b/core/src/main/java/org/elasticsearch/index/indexing/IndexingStats.java @@ -43,19 +43,14 @@ public class IndexingStats implements Streamable, ToXContent { private long indexTimeInMillis; private long indexCurrent; private long indexFailedCount; - private long deleteCount; private long deleteTimeInMillis; private long deleteCurrent; - private long 
noopUpdateCount; - private long throttleTimeInMillis; private boolean isThrottled; - Stats() { - - } + Stats() {} public Stats(long indexCount, long indexTimeInMillis, long indexCurrent, long indexFailedCount, long deleteCount, long deleteTimeInMillis, long deleteCurrent, long noopUpdateCount, boolean isThrottled, long throttleTimeInMillis) { this.indexCount = indexCount; @@ -87,26 +82,29 @@ public class IndexingStats implements Streamable, ToXContent { } } - public long getIndexCount() { - return indexCount; - } + /** + * The total number of indexing operations + */ + public long getIndexCount() { return indexCount; } - public long getIndexFailedCount() { - return indexFailedCount; - } + /** + * The number of failed indexing operations + */ + public long getIndexFailedCount() { return indexFailedCount; } - public TimeValue getIndexTime() { - return new TimeValue(indexTimeInMillis); - } + /** + * The total amount of time spent executing index operations. + */ + public TimeValue getIndexTime() { return new TimeValue(indexTimeInMillis); } - public long getIndexTimeInMillis() { - return indexTimeInMillis; - } - - public long getIndexCurrent() { - return indexCurrent; - } + /** + * Returns the currently in-flight indexing operations. + */ + public long getIndexCurrent() { return indexCurrent; } + /** + * Returns the number of delete operations executed + */ public long getDeleteCount() { return deleteCount; } @@ -114,32 +112,21 @@ public class IndexingStats implements Streamable, ToXContent { /** * Returns if the index is under merge throttling control */ - public boolean isThrottled() { - return isThrottled; - } - - /** - * Gets the amount of time in milliseconds that the index has been under merge throttling control - */ - public long getThrottleTimeInMillis() { - return throttleTimeInMillis; - } + public boolean isThrottled() { return isThrottled; } /** * Gets the amount of time in a TimeValue that the index has been under merge throttling control */ - public TimeValue getThrottleTime() { - return new TimeValue(throttleTimeInMillis); - } + public TimeValue getThrottleTime() { return new TimeValue(throttleTimeInMillis); } - public TimeValue getDeleteTime() { - return new TimeValue(deleteTimeInMillis); - } - - public long getDeleteTimeInMillis() { - return deleteTimeInMillis; - } + /** + * The total amount of time spent executing delete operations. + */ + public TimeValue getDeleteTime() { return new TimeValue(deleteTimeInMillis); } + /** + * Returns the currently in-flight delete operations + */ public long getDeleteCurrent() { return deleteCurrent; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ContentPath.java b/core/src/main/java/org/elasticsearch/index/mapper/ContentPath.java index 47c43720162..54c6ef20e3e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ContentPath.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ContentPath.java @@ -19,16 +19,9 @@ package org.elasticsearch.index.mapper; -public class ContentPath { +public final class ContentPath { - public enum Type { - JUST_NAME, - FULL, - } - - private Type pathType; - - private final char delimiter; + private static final char DELIMITER = '.'; private final StringBuilder sb; @@ -47,7 +40,6 @@ public class ContentPath { * number of path elements to not be included in {@link #pathAsText(String)}.
*/ public ContentPath(int offset) { - this.delimiter = '.'; this.sb = new StringBuilder(); this.offset = offset; reset(); @@ -71,26 +63,11 @@ public class ContentPath { } public String pathAsText(String name) { - if (pathType == Type.JUST_NAME) { - return name; - } - return fullPathAsText(name); - } - - public String fullPathAsText(String name) { sb.setLength(0); for (int i = offset; i < index; i++) { - sb.append(path[i]).append(delimiter); + sb.append(path[i]).append(DELIMITER); } sb.append(name); return sb.toString(); } - - public Type pathType() { - return pathType; - } - - public void pathType(Type type) { - this.pathType = type; - } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index aa933b4ad3e..57f2ff40530 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -20,13 +20,12 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.FieldNameAnalyzer; -import java.util.AbstractMap; import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; @@ -38,44 +37,38 @@ import java.util.Set; public final class DocumentFieldMappers implements Iterable { /** Full field name to mapper */ - private final CopyOnWriteHashMap fieldMappers; + private final Map fieldMappers; private final FieldNameAnalyzer indexAnalyzer; private final FieldNameAnalyzer searchAnalyzer; private final FieldNameAnalyzer searchQuoteAnalyzer; - public DocumentFieldMappers(AnalysisService analysisService) { - this(new CopyOnWriteHashMap(), - new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()), - new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()), - new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer())); - } - - private DocumentFieldMappers(CopyOnWriteHashMap fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) { - this.fieldMappers = fieldMappers; - this.indexAnalyzer = indexAnalyzer; - this.searchAnalyzer = searchAnalyzer; - this.searchQuoteAnalyzer = searchQuoteAnalyzer; - } - - public DocumentFieldMappers copyAndAllAll(Collection newMappers) { - CopyOnWriteHashMap map = this.fieldMappers; - for (FieldMapper fieldMapper : newMappers) { - map = map.copyAndPut(fieldMapper.fieldType().names().fullName(), fieldMapper); + private static void put(Map analyzers, String key, Analyzer value, Analyzer defaultValue) { + if (value == null) { + value = defaultValue; } - FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(newMappers.stream().map((input) -> - new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer()) - )); - FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(newMappers.stream().map((input) -> - new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer()) - )); - FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(newMappers.stream().map((input) -> - new 
AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer) input.fieldType().searchQuoteAnalyzer()) - )); - return new DocumentFieldMappers(map,indexAnalyzer,searchAnalyzer,searchQuoteAnalyzer); + analyzers.put(key, value); } -/** Returns the mapper for the given field */ + public DocumentFieldMappers(Collection mappers, Analyzer defaultIndex, Analyzer defaultSearch, Analyzer defaultSearchQuote) { + Map fieldMappers = new HashMap<>(); + Map indexAnalyzers = new HashMap<>(); + Map searchAnalyzers = new HashMap<>(); + Map searchQuoteAnalyzers = new HashMap<>(); + for (FieldMapper mapper : mappers) { + fieldMappers.put(mapper.name(), mapper); + MappedFieldType fieldType = mapper.fieldType(); + put(indexAnalyzers, fieldType.name(), fieldType.indexAnalyzer(), defaultIndex); + put(searchAnalyzers, fieldType.name(), fieldType.searchAnalyzer(), defaultSearch); + put(searchQuoteAnalyzers, fieldType.name(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote); + } + this.fieldMappers = Collections.unmodifiableMap(fieldMappers); + this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers); + this.searchAnalyzer = new FieldNameAnalyzer(searchAnalyzers); + this.searchQuoteAnalyzer = new FieldNameAnalyzer(searchQuoteAnalyzers); + } + + /** Returns the mapper for the given field */ public FieldMapper getMapper(String field) { return fieldMappers.get(field); } @@ -83,10 +76,10 @@ public final class DocumentFieldMappers implements Iterable { public Collection simpleMatchToFullName(String pattern) { Set fields = new HashSet<>(); for (FieldMapper fieldMapper : this) { - if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) { - fields.add(fieldMapper.fieldType().names().fullName()); - } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) { - fields.add(fieldMapper.fieldType().names().fullName()); + if (Regex.simpleMatch(pattern, fieldMapper.fieldType().name())) { + fields.add(fieldMapper.fieldType().name()); + } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().name())) { + fields.add(fieldMapper.fieldType().name()); } } return fields; @@ -98,7 +91,7 @@ public final class DocumentFieldMappers implements Iterable { return fieldMapper; } for (FieldMapper otherFieldMapper : this) { - if (otherFieldMapper.fieldType().names().indexName().equals(name)) { + if (otherFieldMapper.fieldType().name().equals(name)) { return otherFieldMapper; } } @@ -113,14 +106,6 @@ public final class DocumentFieldMappers implements Iterable { return this.indexAnalyzer; } - /** - * A smart analyzer used for indexing that takes into account specific analyzers configured - * per {@link FieldMapper} with a custom default analyzer for no explicit field analyzer. - */ - public Analyzer indexAnalyzer(Analyzer defaultAnalyzer) { - return new FieldNameAnalyzer(indexAnalyzer.analyzers(), defaultAnalyzer); - } - /** * A smart analyzer used for searching that takes into account specific analyzers configured * per {@link FieldMapper}. 
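The rewritten DocumentFieldMappers above is built once, in its constructor, from the complete mapper collection: each field's analyzer is registered with a fall-back to the index default, and the map is then frozen. A condensed sketch of that pattern, assuming a plain unmodifiable HashMap is sufficient once the class is immutable (names are illustrative):

    import org.apache.lucene.analysis.Analyzer;

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    // Illustrative sketch: one pass, default any missing analyzer, then freeze.
    // No copy-on-write structures are needed for an immutable registry.
    final class AnalyzerTable {
        private final Map<String, Analyzer> analyzers;

        AnalyzerTable(Map<String, Analyzer> configured, Analyzer defaultAnalyzer) {
            Map<String, Analyzer> table = new HashMap<>();
            for (Map.Entry<String, Analyzer> entry : configured.entrySet()) {
                Analyzer value = entry.getValue();
                table.put(entry.getKey(), value == null ? defaultAnalyzer : value);
            }
            this.analyzers = Collections.unmodifiableMap(table);
        }

        Analyzer get(String field) {
            return analyzers.get(field);
        }
    }
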
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index c4fec8cf095..c2d644d393d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -20,21 +20,19 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchGenerationException; -import org.elasticsearch.Version; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringAndBytesText; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.internal.IdFieldMapper; @@ -52,14 +50,12 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.locks.ReentrantReadWriteLock; import static java.util.Collections.emptyMap; @@ -72,16 +68,14 @@ public class DocumentMapper implements ToXContent { private Map, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>(); - private final Settings indexSettings; - private final RootObjectMapper rootObjectMapper; private Map meta = emptyMap(); private final Mapper.BuilderContext builderContext; - public Builder(Settings indexSettings, RootObjectMapper.Builder builder, MapperService mapperService) { - this.indexSettings = indexSettings; + public Builder(RootObjectMapper.Builder builder, MapperService mapperService) { + final Settings indexSettings = mapperService.getIndexSettings().getSettings(); this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1)); this.rootObjectMapper = builder.build(builderContext); @@ -104,49 +98,41 @@ public class DocumentMapper implements ToXContent { return this; } - public DocumentMapper build(MapperService mapperService, DocumentMapperParser docMapperParser) { + public DocumentMapper build(MapperService mapperService) { Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set"); - return new DocumentMapper(mapperService, indexSettings, docMapperParser, rootObjectMapper, meta, metadataMappers, mapperService.mappingLock); + Mapping mapping = new Mapping( + mapperService.getIndexSettings().getIndexVersionCreated(), + rootObjectMapper, + metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]), + meta); + return new DocumentMapper(mapperService, mapping); } } private final MapperService 
mapperService; private final String type; - private final StringAndBytesText typeText; + private final Text typeText; - private volatile CompressedXContent mappingSource; + private final CompressedXContent mappingSource; private final Mapping mapping; private final DocumentParser documentParser; - private volatile DocumentFieldMappers fieldMappers; + private final DocumentFieldMappers fieldMappers; - private volatile Map objectMappers = Collections.emptyMap(); + private final Map objectMappers; - private boolean hasNestedObjects = false; + private final boolean hasNestedObjects; - private final ReleasableLock mappingWriteLock; - private final ReentrantReadWriteLock mappingLock; - - public DocumentMapper(MapperService mapperService, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser, - RootObjectMapper rootObjectMapper, - Map meta, - Map, MetadataFieldMapper> metadataMappers, - ReentrantReadWriteLock mappingLock) { + public DocumentMapper(MapperService mapperService, Mapping mapping) { this.mapperService = mapperService; - this.type = rootObjectMapper.name(); - this.typeText = new StringAndBytesText(this.type); - this.mapping = new Mapping( - Version.indexCreated(indexSettings), - rootObjectMapper, - metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]), - meta); - this.documentParser = new DocumentParser(indexSettings, docMapperParser, this, new ReleasableLock(mappingLock.readLock())); - - this.mappingWriteLock = new ReleasableLock(mappingLock.writeLock()); - this.mappingLock = mappingLock; + this.type = mapping.root().name(); + this.typeText = new Text(this.type); + final IndexSettings indexSettings = mapperService.getIndexSettings(); + this.mapping = mapping; + this.documentParser = new DocumentParser(indexSettings, mapperService.documentMapperParser(), this); if (metadataMapper(ParentFieldMapper.class).active()) { // mark the routing field mapper as required @@ -163,7 +149,11 @@ public class DocumentMapper implements ToXContent { } MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers); - this.fieldMappers = new DocumentFieldMappers(docMapperParser.analysisService).copyAndAllAll(newFieldMappers); + final AnalysisService analysisService = mapperService.analysisService(); + this.fieldMappers = new DocumentFieldMappers(newFieldMappers, + analysisService.defaultIndexAnalyzer(), + analysisService.defaultSearchAnalyzer(), + analysisService.defaultSearchQuoteAnalyzer()); Map builder = new HashMap<>(); for (ObjectMapper objectMapper : newObjectMappers) { @@ -173,14 +163,20 @@ public class DocumentMapper implements ToXContent { } } + boolean hasNestedObjects = false; this.objectMappers = Collections.unmodifiableMap(builder); for (ObjectMapper objectMapper : newObjectMappers) { if (objectMapper.nested().isNested()) { hasNestedObjects = true; } } + this.hasNestedObjects = hasNestedObjects; - refreshSource(); + try { + mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS); + } catch (Exception e) { + throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); + } } public Mapping mapping() { @@ -297,12 +293,12 @@ public class DocumentMapper implements ToXContent { // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and // therefor is guaranteed to be a live doc. 
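The hunk below moves off the old pattern of treating a Scorer as a DocIdSetIterator: the iterator is now obtained explicitly via Scorer#iterator(). A self-contained sketch of the advance-and-compare check performed here (class and method names are illustrative):

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    import java.io.IOException;

    // Illustrative sketch of the nested-document membership test below.
    final class NestedDocCheck {
        static boolean matches(Weight weight, LeafReaderContext context, int docId) throws IOException {
            Scorer scorer = weight.scorer(context);
            if (scorer == null) {
                return false; // no matching documents in this segment
            }
            // advance() returns the first match at or after docId
            return scorer.iterator().advance(docId) == docId;
        }
    }
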
final Weight nestedWeight = filter.createWeight(sc.searcher(), false); - DocIdSetIterator iterator = nestedWeight.scorer(context); - if (iterator == null) { + Scorer scorer = nestedWeight.scorer(context); + if (scorer == null) { continue; } - if (iterator.advance(nestedDocId) == nestedDocId) { + if (scorer.iterator().advance(nestedDocId) == nestedDocId) { if (nestedObjectMapper == null) { nestedObjectMapper = objectMapper; } else { @@ -334,43 +330,17 @@ public class DocumentMapper implements ToXContent { return mapperService.getParentTypes().contains(type); } - private void addMappers(Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { - assert mappingLock.isWriteLockedByCurrentThread(); - - // update mappers for this document type - Map builder = new HashMap<>(this.objectMappers); - for (ObjectMapper objectMapper : objectMappers) { - builder.put(objectMapper.fullPath(), objectMapper); - if (objectMapper.nested().isNested()) { - hasNestedObjects = true; - } - } - this.objectMappers = Collections.unmodifiableMap(builder); - this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers); - - // finally update for the entire index - mapperService.addMappers(type, objectMappers, fieldMappers); + public DocumentMapper merge(Mapping mapping, boolean updateAllTypes) { + Mapping merged = this.mapping.merge(mapping, updateAllTypes); + return new DocumentMapper(mapperService, merged); } - public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) { - try (ReleasableLock lock = mappingWriteLock.acquire()) { - mapperService.checkMappersCompatibility(type, mapping, updateAllTypes); - final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes); - this.mapping.merge(mapping, mergeResult); - if (simulate == false) { - addMappers(mergeResult.getNewObjectMappers(), mergeResult.getNewFieldMappers(), updateAllTypes); - refreshSource(); - } - return mergeResult; - } - } - - private void refreshSource() throws ElasticsearchGenerationException { - try { - mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS); - } catch (Exception e) { - throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); - } + /** + * Recursively update sub field types. 
+ */ + public DocumentMapper updateFieldType(Map fullNameToFieldType) { + Mapping updated = this.mapping.updateFieldType(fullNameToFieldType); + return new DocumentMapper(mapperService, updated); } public void close() { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index 8951ecf0f4e..f087e06e3c5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -37,14 +36,15 @@ import org.elasticsearch.index.mapper.object.RootObjectMapper; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.mapper.MapperRegistry; -import java.util.*; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.index.mapper.MapperBuilders.doc; public class DocumentMapperParser { - private final Settings indexSettings; final MapperService mapperService; final AnalysisService analysisService; private static final ESLogger logger = Loggers.getLogger(DocumentMapperParser.class); @@ -60,8 +60,7 @@ public class DocumentMapperParser { public DocumentMapperParser(IndexSettings indexSettings, MapperService mapperService, AnalysisService analysisService, SimilarityService similarityService, MapperRegistry mapperRegistry) { - this.indexSettings = indexSettings.getSettings(); - this.parseFieldMatcher = new ParseFieldMatcher(this.indexSettings); + this.parseFieldMatcher = new ParseFieldMatcher(indexSettings.getSettings()); this.mapperService = mapperService; this.analysisService = analysisService; this.similarityService = similarityService; @@ -74,32 +73,11 @@ public class DocumentMapperParser { return new Mapper.TypeParser.ParserContext(type, analysisService, similarityService::getSimilarity, mapperService, typeParsers::get, indexVersionCreated, parseFieldMatcher); } - public DocumentMapper parse(String source) throws MapperParsingException { - return parse(null, source); - } - - public DocumentMapper parse(@Nullable String type, String source) throws MapperParsingException { + public DocumentMapper parse(@Nullable String type, CompressedXContent source) throws MapperParsingException { return parse(type, source, null); } - public DocumentMapper parse(@Nullable String type, String source, String defaultSource) throws MapperParsingException { - Map mapping = null; - if (source != null) { - Tuple> t = extractMapping(type, source); - type = t.v1(); - mapping = t.v2(); - } - if (mapping == null) { - mapping = new HashMap<>(); - } - return parse(type, mapping, defaultSource); - } - - public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source) throws MapperParsingException { - return parseCompressed(type, source, null); - } - - public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException { + public DocumentMapper 
parse(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException { Map mapping = null; if (source != null) { Map root = XContentHelper.convertToMap(source.compressedReference(), true).v2(); @@ -129,7 +107,7 @@ public class DocumentMapperParser { Mapper.TypeParser.ParserContext parserContext = parserContext(type); // parse RootObjectMapper - DocumentMapper.Builder docBuilder = doc(indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService); + DocumentMapper.Builder docBuilder = doc((RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService); Iterator> iterator = mapping.entrySet().iterator(); // parse DocumentMapper while(iterator.hasNext()) { @@ -156,7 +134,7 @@ public class DocumentMapperParser { checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: "); - return docBuilder.build(mapperService, this); + return docBuilder.build(mapperService); } public static void checkNoRemainingFields(String fieldName, Map fieldNodeMap, Version indexVersionCreated) { @@ -165,11 +143,7 @@ public class DocumentMapperParser { public static void checkNoRemainingFields(Map fieldNodeMap, Version indexVersionCreated, String message) { if (!fieldNodeMap.isEmpty()) { - if (indexVersionCreated.onOrAfter(Version.V_2_0_0_beta1)) { - throw new MapperParsingException(message + getRemainingFields(fieldNodeMap)); - } else { - logger.debug(message + "{}", getRemainingFields(fieldNodeMap)); - } + throw new MapperParsingException(message + getRemainingFields(fieldNodeMap)); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index b0ad972d575..c1362287d67 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -23,16 +23,12 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.CloseableThreadLocal; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; -import org.elasticsearch.index.mapper.core.NumberFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -53,29 +49,21 @@ class DocumentParser implements Closeable { private CloseableThreadLocal cache = new CloseableThreadLocal() { @Override protected ParseContext.InternalParseContext initialValue() { - return new ParseContext.InternalParseContext(indexSettings, docMapperParser, docMapper, new ContentPath(0)); + return new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, new ContentPath(0)); } }; - private final Settings indexSettings; + private final 
IndexSettings indexSettings; private final DocumentMapperParser docMapperParser; private final DocumentMapper docMapper; - private final ReleasableLock parseLock; - public DocumentParser(Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ReleasableLock parseLock) { + public DocumentParser(IndexSettings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper) { this.indexSettings = indexSettings; this.docMapperParser = docMapperParser; this.docMapper = docMapper; - this.parseLock = parseLock; } public ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException { - try (ReleasableLock lock = parseLock.acquire()){ - return innerParseDocument(source); - } - } - - private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException { if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) { throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]"); } @@ -132,8 +120,7 @@ class DocumentParser implements Closeable { // try to parse the next token, this should be null if the object is ended properly // but will throw a JSON exception if the extra tokens is not valid JSON (this will be handled by the catch) - if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1) - && source.parser() == null && parser != null) { + if (source.parser() == null && parser != null) { // only check for end of tokens if we created the parser here token = parser.nextToken(); if (token != null) { @@ -200,8 +187,7 @@ class DocumentParser implements Closeable { XContentParser parser = context.parser(); String currentFieldName = parser.currentName(); - if (atRoot && MapperService.isMetadataField(currentFieldName) && - Version.indexCreated(context.indexSettings()).onOrAfter(Version.V_2_0_0_beta1)) { + if (atRoot && MapperService.isMetadataField(currentFieldName)) { throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside a document. 
Use the index API request parameters."); } XContentParser.Token token = parser.currentToken(); @@ -234,9 +220,6 @@ class DocumentParser implements Closeable { nestedDoc.add(new Field(TypeFieldMapper.NAME, mapper.nestedTypePathAsString(), TypeFieldMapper.Defaults.FIELD_TYPE)); } - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(mapper.pathType()); - // if we are at the end of the previous object, advance if (token == XContentParser.Token.END_OBJECT) { token = parser.nextToken(); @@ -267,12 +250,11 @@ class DocumentParser implements Closeable { if (update == null) { update = newUpdate; } else { - MapperUtils.merge(update, newUpdate); + update = update.merge(newUpdate, false); } } } // restore the enable path flag - context.path().pathType(origPathType); if (nested.isNested()) { ParseContext.Document nestedDoc = context.doc(); ParseContext.Document parentDoc = nestedDoc.getParent(); @@ -341,7 +323,7 @@ class DocumentParser implements Closeable { context.path().remove(); Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object"); if (builder == null) { - builder = MapperBuilders.object(currentFieldName).enabled(true).pathType(mapper.pathType()); + builder = MapperBuilders.object(currentFieldName).enabled(true); // if this is a non root object, then explicitly set the dynamic behavior if set if (!(mapper instanceof RootObjectMapper) && mapper.dynamic() != ObjectMapper.Defaults.DYNAMIC) { ((ObjectMapper.Builder) builder).dynamic(mapper.dynamic()); @@ -609,40 +591,22 @@ class DocumentParser implements Closeable { if (dynamic == ObjectMapper.Dynamic.FALSE) { return null; } + final String path = context.path().pathAsText(currentFieldName); final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); - final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().fullPathAsText(currentFieldName)); + final MappedFieldType existingFieldType = context.mapperService().fullName(path); Mapper.Builder builder = null; if (existingFieldType != null) { // create a builder of the same type builder = createBuilderFromFieldType(context, existingFieldType, currentFieldName); - if (builder != null) { - // best-effort to not introduce a conflict - if (builder instanceof StringFieldMapper.Builder) { - StringFieldMapper.Builder stringBuilder = (StringFieldMapper.Builder) builder; - stringBuilder.fieldDataSettings(existingFieldType.fieldDataType().getSettings()); - stringBuilder.store(existingFieldType.stored()); - stringBuilder.indexOptions(existingFieldType.indexOptions()); - stringBuilder.tokenized(existingFieldType.tokenized()); - stringBuilder.omitNorms(existingFieldType.omitNorms()); - stringBuilder.docValues(existingFieldType.hasDocValues()); - stringBuilder.indexAnalyzer(existingFieldType.indexAnalyzer()); - stringBuilder.searchAnalyzer(existingFieldType.searchAnalyzer()); - } else if (builder instanceof NumberFieldMapper.Builder) { - NumberFieldMapper.Builder numberBuilder = (NumberFieldMapper.Builder) builder; - numberBuilder.fieldDataSettings(existingFieldType.fieldDataType().getSettings()); - numberBuilder.store(existingFieldType.stored()); - numberBuilder.indexOptions(existingFieldType.indexOptions()); - numberBuilder.tokenized(existingFieldType.tokenized()); - numberBuilder.omitNorms(existingFieldType.omitNorms()); - numberBuilder.docValues(existingFieldType.hasDocValues()); - numberBuilder.precisionStep(existingFieldType.numericPrecisionStep()); - } - 
} } if (builder == null) { builder = createBuilderFromDynamicValue(context, token, currentFieldName); } Mapper mapper = builder.build(builderContext); + if (existingFieldType != null) { + // try to not introduce a conflict + mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType)); + } mapper = parseAndMergeUpdate(mapper, context); @@ -695,7 +659,7 @@ class DocumentParser implements Closeable { if (paths.length > 1) { ObjectMapper parent = context.root(); for (int i = 0; i < paths.length-1; i++) { - mapper = context.docMapper().objectMappers().get(context.path().fullPathAsText(paths[i])); + mapper = context.docMapper().objectMappers().get(context.path().pathAsText(paths[i])); if (mapper == null) { // One mapping is missing, check if we are allowed to create a dynamic one. ObjectMapper.Dynamic dynamic = parent.dynamic(); @@ -713,12 +677,12 @@ class DocumentParser implements Closeable { if (!(parent instanceof RootObjectMapper) && parent.dynamic() != ObjectMapper.Defaults.DYNAMIC) { ((ObjectMapper.Builder) builder).dynamic(parent.dynamic()); } - builder = MapperBuilders.object(paths[i]).enabled(true).pathType(parent.pathType()); + builder = MapperBuilders.object(paths[i]).enabled(true); } Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); mapper = (ObjectMapper) builder.build(builderContext); if (mapper.nested() != ObjectMapper.Nested.NO) { - throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().fullPathAsText(paths[i]) + "]) through `copy_to`"); + throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to`"); } break; case FALSE: @@ -759,7 +723,7 @@ class DocumentParser implements Closeable { private static M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException { final Mapper update = parseObjectOrField(context, mapper); if (update != null) { - MapperUtils.merge(mapper, update); + mapper = (M) mapper.merge(update, false); } return mapper; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index ced3f08b229..23d8cd56034 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -24,7 +24,6 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lucene.Lucene; @@ -44,10 +43,10 @@ import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; -import java.util.Locale; +import java.util.Map; import java.util.stream.StreamSupport; -public abstract class FieldMapper extends Mapper { +public abstract class FieldMapper extends Mapper implements Cloneable { public abstract static class Builder extends Mapper.Builder { @@ -64,10 +63,10 @@ public abstract class FieldMapper extends Mapper { protected final MultiFields.Builder multiFieldsBuilder; protected CopyTo copyTo; - protected Builder(String name, MappedFieldType fieldType) { + protected Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) { 
super(name); this.fieldType = fieldType.clone(); - this.defaultFieldType = fieldType.clone(); + this.defaultFieldType = defaultFieldType.clone(); this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable multiFieldsBuilder = new MultiFields.Builder(); } @@ -84,8 +83,13 @@ public abstract class FieldMapper extends Mapper { * if the fieldType has a non-null option we are all good it might have been set through a different * call. */ - final IndexOptions options = getDefaultIndexOption(); - assert options != IndexOptions.NONE : "default IndexOptions is NONE can't enable indexing"; + IndexOptions options = getDefaultIndexOption(); + if (options == IndexOptions.NONE) { + // can happen when an existing type on the same index has disabled indexing + // since we inherit the default field type from the first mapper that is + // created on an index + throw new IllegalArgumentException("mapper [" + name + "] has different [index] values from other types of the same index"); + } fieldType.setIndexOptions(options); } } else { @@ -202,11 +206,6 @@ public abstract class FieldMapper extends Mapper { return this; } - public T multiFieldPathType(ContentPath.Type pathType) { - multiFieldsBuilder.pathType(pathType); - return builder; - } - public T addMultiField(Mapper.Builder mapperBuilder) { multiFieldsBuilder.add(mapperBuilder); return builder; @@ -217,31 +216,12 @@ public abstract class FieldMapper extends Mapper { return builder; } - protected MappedFieldType.Names buildNames(BuilderContext context) { - return new MappedFieldType.Names(buildIndexName(context), buildIndexNameClean(context), buildFullName(context)); - } - - protected String buildIndexName(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) { - return buildFullName(context); - } - String actualIndexName = indexName == null ? name : indexName; - return context.path().pathAsText(actualIndexName); - } - - protected String buildIndexNameClean(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) { - return buildFullName(context); - } - return indexName == null ? name : indexName; - } - protected String buildFullName(BuilderContext context) { - return context.path().fullPathAsText(name); + return context.path().pathAsText(name); } protected void setupFieldType(BuilderContext context) { - fieldType.setNames(buildNames(context)); + fieldType.setName(buildFullName(context)); if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) { fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); @@ -250,10 +230,7 @@ public abstract class FieldMapper extends Mapper { Settings settings = Settings.builder().put(fieldType.fieldDataType().getSettings()).put(fieldDataSettings).build(); fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings)); } - boolean defaultDocValues = false; // pre 2.0 - if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) { - defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE; - } + boolean defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE; // backcompat for "fielddata: format: docvalues" for now... 
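With the pre-2.0 branch deleted just above, the doc-values default reduces to one expression: enabled exactly when the field is indexed but not tokenized. A tiny illustrative helper capturing that rule:

    import org.apache.lucene.index.IndexOptions;

    // Mirrors the simplified default above; not the patch's own code.
    final class DocValuesDefaults {
        static boolean defaultDocValues(boolean tokenized, IndexOptions indexOptions) {
            return tokenized == false && indexOptions != IndexOptions.NONE;
        }
    }
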
boolean fieldDataDocValues = fieldType.fieldDataType() != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldType.fieldDataType().getFormat(context.indexSettings())); @@ -268,17 +245,16 @@ public abstract class FieldMapper extends Mapper { } } - protected MappedFieldTypeReference fieldTypeRef; + protected MappedFieldType fieldType; protected final MappedFieldType defaultFieldType; - protected final MultiFields multiFields; + protected MultiFields multiFields; protected CopyTo copyTo; - protected final boolean indexCreatedBefore2x; protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName); assert indexSettings != null; - this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1); - this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // the reference ctor freezes the field type + fieldType.freeze(); + this.fieldType = fieldType; defaultFieldType.freeze(); this.defaultFieldType = defaultFieldType; this.multiFields = multiFields; @@ -287,27 +263,11 @@ public abstract class FieldMapper extends Mapper { @Override public String name() { - return fieldType().names().fullName(); + return fieldType().name(); } public MappedFieldType fieldType() { - return fieldTypeRef.get(); - } - - /** Returns a reference to the MappedFieldType for this mapper. */ - public MappedFieldTypeReference fieldTypeReference() { - return fieldTypeRef; - } - - /** - * Updates the reference to this field's MappedFieldType. - * Implementations should assert equality of the underlying field type - */ - public void setFieldTypeReference(MappedFieldTypeReference ref) { - if (ref.get().equals(fieldType()) == false) { - throw new IllegalStateException("Cannot overwrite field type reference to unequal reference"); - } - this.fieldTypeRef = ref; + return fieldType; } /** @@ -333,7 +293,7 @@ public abstract class FieldMapper extends Mapper { context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e); + throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); } multiFields.parse(this, context); return null; @@ -351,34 +311,65 @@ public abstract class FieldMapper extends Mapper { return false; } + @Override public Iterator iterator() { - if (multiFields == null) { - return Collections.emptyIterator(); - } return multiFields.iterator(); } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected FieldMapper clone() { + try { + return (FieldMapper) super.clone(); + } catch (CloneNotSupportedException e) { + throw new AssertionError(e); + } + } + + @Override + public FieldMapper merge(Mapper mergeWith, boolean updateAllTypes) { + FieldMapper merged = clone(); + merged.doMerge(mergeWith, updateAllTypes); + return merged; + } + + /** + * Merge changes coming from {@code mergeWith} in place. 
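+     * {@link #merge(Mapper, boolean)} first clones this mapper and then invokes this
+     * method on the clone, so merging never mutates the original mapper.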
+     * @param updateAllTypes if {@code true}, settings that are usually required to be
+     *                        the same across all types of the index (boost, analyzers, ...)
+     *                        may be updated; otherwise strict compatibility checks apply
+     */
+    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        if (!this.getClass().equals(mergeWith.getClass())) {
            String mergedType = mergeWith.getClass().getSimpleName();
            if (mergeWith instanceof FieldMapper) {
                mergedType = ((FieldMapper) mergeWith).contentType();
            }
-            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
-            // different types, return
-            return;
+            throw new IllegalArgumentException("mapper [" + fieldType().name() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
        }
        FieldMapper fieldMergeWith = (FieldMapper) mergeWith;
-        multiFields.merge(mergeWith, mergeResult);
+        multiFields = multiFields.merge(fieldMergeWith.multiFields);
-        if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
-            // apply changeable values
-            MappedFieldType fieldType = fieldMergeWith.fieldType().clone();
-            fieldType.freeze();
-            fieldTypeRef.set(fieldType);
-            this.copyTo = fieldMergeWith.copyTo;
+        // apply changeable values
+        this.fieldType = fieldMergeWith.fieldType;
+        this.copyTo = fieldMergeWith.copyTo;
+    }
+
+    @Override
+    public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
+        final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name());
+        if (newFieldType == null) {
+            // this field does not exist in the mappings yet
+            // this can happen if this mapper represents a mapping update
+            return this;
+        } else if (fieldType.getClass() != newFieldType.getClass()) {
+            throw new IllegalStateException("Mixing up field types: " + fieldType.getClass() + " != " + newFieldType.getClass());
        }
+        MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType);
+        if (fieldType == newFieldType && multiFields == updatedMultiFields) {
+            return this; // no change
+        }
+        FieldMapper updated = clone();
+        updated.fieldType = newFieldType;
+        updated.multiFields = updatedMultiFields;
+        return updated;
    }

    @Override
@@ -392,9 +383,6 @@ public abstract class FieldMapper extends Mapper {
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        builder.field("type", contentType());
-        if (indexCreatedBefore2x && (includeDefaults || !simpleName().equals(fieldType().names().originalIndexName()))) {
-            builder.field("index_name", fieldType().names().originalIndexName());
-        }
        if (includeDefaults || fieldType().boost() != 1.0f) {
            builder.field("boost", fieldType().boost());
@@ -520,18 +508,12 @@ public abstract class FieldMapper extends Mapper {
    public static class MultiFields {
        public static MultiFields empty() {
-            return new MultiFields(ContentPath.Type.FULL, ImmutableOpenMap.of());
+            return new MultiFields(ImmutableOpenMap.of());
        }
        public static class Builder {
            private final ImmutableOpenMap.Builder mapperBuilders = ImmutableOpenMap.builder();
-            private ContentPath.Type pathType = ContentPath.Type.FULL;
-
-            public Builder pathType(ContentPath.Type pathType) {
-                this.pathType = pathType;
-                return this;
-            }
            public Builder add(Mapper.Builder builder) {
                mapperBuilders.put(builder.name(), builder);
@@ -540,13 +522,9 @@ public abstract class FieldMapper extends Mapper {
        @SuppressWarnings("unchecked")
        public MultiFields build(FieldMapper.Builder mainFieldBuilder, BuilderContext context) {
-            if (pathType == ContentPath.Type.FULL && mapperBuilders.isEmpty()) {
+            if (mapperBuilders.isEmpty()) {
                return empty();
-            } else if
(mapperBuilders.isEmpty()) { - return new MultiFields(pathType, ImmutableOpenMap.of()); } else { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); context.path().add(mainFieldBuilder.name()); ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders; for (ObjectObjectCursor cursor : this.mapperBuilders) { @@ -557,26 +535,25 @@ public abstract class FieldMapper extends Mapper { mapperBuilders.put(key, mapper); } context.path().remove(); - context.path().pathType(origPathType); ImmutableOpenMap.Builder mappers = mapperBuilders.cast(); - return new MultiFields(pathType, mappers.build()); + return new MultiFields(mappers.build()); } } } - private final ContentPath.Type pathType; - private volatile ImmutableOpenMap mappers; + private final ImmutableOpenMap mappers; - public MultiFields(ContentPath.Type pathType, ImmutableOpenMap mappers) { - this.pathType = pathType; - this.mappers = mappers; + private MultiFields(ImmutableOpenMap mappers) { + ImmutableOpenMap.Builder builder = new ImmutableOpenMap.Builder<>(); // we disable the all in multi-field mappers - for (ObjectCursor cursor : mappers.values()) { + for (ObjectObjectCursor cursor : mappers) { FieldMapper mapper = cursor.value; if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll(); + mapper = (FieldMapper) ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll(); } + builder.put(cursor.key, mapper); } + this.mappers = builder.build(); } public void parse(FieldMapper mainField, ParseContext context) throws IOException { @@ -587,58 +564,54 @@ public abstract class FieldMapper extends Mapper { context = context.createMultiFieldContext(); - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); - context.path().add(mainField.simpleName()); for (ObjectCursor cursor : mappers.values()) { cursor.value.parse(context); } context.path().remove(); - context.path().pathType(origPathType); } - // No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge - public void merge(Mapper mergeWith, MergeResult mergeResult) { - FieldMapper mergeWithMultiField = (FieldMapper) mergeWith; + public MultiFields merge(MultiFields mergeWith) { + ImmutableOpenMap.Builder newMappersBuilder = ImmutableOpenMap.builder(mappers); - List newFieldMappers = null; - ImmutableOpenMap.Builder newMappersBuilder = null; - - for (ObjectCursor cursor : mergeWithMultiField.multiFields.mappers.values()) { + for (ObjectCursor cursor : mergeWith.mappers.values()) { FieldMapper mergeWithMapper = cursor.value; - Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName()); + FieldMapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName()); if (mergeIntoMapper == null) { - // no mapping, simply add it if not simulating - if (!mergeResult.simulate()) { - // we disable the all in multi-field mappers - if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll(); - } - if (newMappersBuilder == null) { - newMappersBuilder = ImmutableOpenMap.builder(mappers); - } - newMappersBuilder.put(mergeWithMapper.simpleName(), mergeWithMapper); - if (mergeWithMapper instanceof FieldMapper) { - if (newFieldMappers == null) { - newFieldMappers = new ArrayList<>(2); - } - newFieldMappers.add(mergeWithMapper); - } + // we disable the all in multi-field mappers + if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) { + 
mergeWithMapper = (FieldMapper) ((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll(); } + newMappersBuilder.put(mergeWithMapper.simpleName(), mergeWithMapper); } else { - mergeIntoMapper.merge(mergeWithMapper, mergeResult); + FieldMapper merged = mergeIntoMapper.merge(mergeWithMapper, false); + newMappersBuilder.put(merged.simpleName(), merged); // override previous definition } } - // first add all field mappers - if (newFieldMappers != null) { - mergeResult.addFieldMappers(newFieldMappers); + ImmutableOpenMap mappers = newMappersBuilder.build(); + return new MultiFields(mappers); + } + + public MultiFields updateFieldType(Map fullNameToFieldType) { + ImmutableOpenMap.Builder newMappersBuilder = null; + + for (ObjectCursor cursor : mappers.values()) { + FieldMapper updated = cursor.value.updateFieldType(fullNameToFieldType); + if (updated != cursor.value) { + if (newMappersBuilder == null) { + newMappersBuilder = ImmutableOpenMap.builder(mappers); + } + newMappersBuilder.put(updated.simpleName(), updated); + } } - // now publish mappers - if (newMappersBuilder != null) { - mappers = newMappersBuilder.build(); + + if (newMappersBuilder == null) { + return this; } + + ImmutableOpenMap mappers = newMappersBuilder.build(); + return new MultiFields(mappers); } public Iterator iterator() { @@ -646,9 +619,6 @@ public abstract class FieldMapper extends Mapper { } public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (pathType != ContentPath.Type.FULL) { - builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); - } if (!mappers.isEmpty()) { // sort the mappers so we get consistent serialization format Mapper[] sortedMappers = mappers.values().toArray(Mapper.class); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index da21e599cc9..5e9378e2f55 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -37,34 +37,22 @@ import java.util.Set; class FieldTypeLookup implements Iterable { /** Full field name to field type */ - private final CopyOnWriteHashMap fullNameToFieldType; + final CopyOnWriteHashMap fullNameToFieldType; /** Full field name to types containing a mapping for this full name. */ - private final CopyOnWriteHashMap> fullNameToTypes; - - /** Index field name to field type */ - private final CopyOnWriteHashMap indexNameToFieldType; - - /** Index field name to types containing a mapping for this index name. */ - private final CopyOnWriteHashMap> indexNameToTypes; + final CopyOnWriteHashMap> fullNameToTypes; /** Create a new empty instance. 
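     * Both lookup maps are copy-on-write: {@link #copyAndAddAll} returns an updated
     * copy and leaves this instance unchanged.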
*/ public FieldTypeLookup() { fullNameToFieldType = new CopyOnWriteHashMap<>(); fullNameToTypes = new CopyOnWriteHashMap<>(); - indexNameToFieldType = new CopyOnWriteHashMap<>(); - indexNameToTypes = new CopyOnWriteHashMap<>(); } private FieldTypeLookup( - CopyOnWriteHashMap fullName, - CopyOnWriteHashMap> fullNameToTypes, - CopyOnWriteHashMap indexName, - CopyOnWriteHashMap> indexNameToTypes) { + CopyOnWriteHashMap fullName, + CopyOnWriteHashMap> fullNameToTypes) { this.fullNameToFieldType = fullName; this.fullNameToTypes = fullNameToTypes; - this.indexNameToFieldType = indexName; - this.indexNameToTypes = indexNameToTypes; } private static CopyOnWriteHashMap> addType(CopyOnWriteHashMap> map, String key, String type) { @@ -89,47 +77,29 @@ class FieldTypeLookup implements Iterable { * from the provided fields. If a field already exists, the field type will be updated * to use the new mappers field type. */ - public FieldTypeLookup copyAndAddAll(String type, Collection newFieldMappers) { + public FieldTypeLookup copyAndAddAll(String type, Collection fieldMappers, boolean updateAllTypes) { Objects.requireNonNull(type, "type must not be null"); if (MapperService.DEFAULT_MAPPING.equals(type)) { throw new IllegalArgumentException("Default mappings should not be added to the lookup"); } - CopyOnWriteHashMap fullName = this.fullNameToFieldType; - CopyOnWriteHashMap> fullNameToTypes = this.fullNameToTypes; - CopyOnWriteHashMap indexName = this.indexNameToFieldType; - CopyOnWriteHashMap> indexNameToTypes = this.indexNameToTypes; - for (FieldMapper fieldMapper : newFieldMappers) { + CopyOnWriteHashMap fullName = this.fullNameToFieldType; + CopyOnWriteHashMap> fullNameToTypes = this.fullNameToTypes; + + for (FieldMapper fieldMapper : fieldMappers) { MappedFieldType fieldType = fieldMapper.fieldType(); - MappedFieldTypeReference fullNameRef = fullName.get(fieldType.names().fullName()); - MappedFieldTypeReference indexNameRef = indexName.get(fieldType.names().indexName()); - if (fullNameRef == null && indexNameRef == null) { - // new field, just use the ref from this field mapper - fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldTypeReference()); - indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldTypeReference()); - } else if (fullNameRef == null) { - // this index name already exists, so copy over the reference - fullName = fullName.copyAndPut(fieldType.names().fullName(), indexNameRef); - indexNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed - fieldMapper.setFieldTypeReference(indexNameRef); - } else if (indexNameRef == null) { - // this full name already exists, so copy over the reference - indexName = indexName.copyAndPut(fieldType.names().indexName(), fullNameRef); - fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed - fieldMapper.setFieldTypeReference(fullNameRef); - } else if (fullNameRef == indexNameRef) { - // the field already exists, so replace the reference in this mapper with the pre-existing one - fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed - fieldMapper.setFieldTypeReference(fullNameRef); - } else { - // this new field bridges between two existing field names (a full and index name), which we cannot support - throw new IllegalStateException("insane mappings found. 
field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName()); + MappedFieldType fullNameFieldType = fullName.get(fieldType.name()); + + // is the update even legal? + checkCompatibility(type, fieldMapper, updateAllTypes); + + if (fieldType != fullNameFieldType) { + fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType()); } - fullNameToTypes = addType(fullNameToTypes, fieldType.names().fullName(), type); - indexNameToTypes = addType(indexNameToTypes, fieldType.names().indexName(), type); + fullNameToTypes = addType(fullNameToTypes, fieldType.name(), type); } - return new FieldTypeLookup(fullName, fullNameToTypes, indexName, indexNameToTypes); + return new FieldTypeLookup(fullName, fullNameToTypes); } private static boolean beStrict(String type, Set types, boolean updateAllTypes) { @@ -145,42 +115,26 @@ class FieldTypeLookup implements Iterable { } /** - * Checks if the given mappers' field types are compatible with existing field types. - * If any are not compatible, an IllegalArgumentException is thrown. + * Checks if the given field type is compatible with an existing field type. + * An IllegalArgumentException is thrown in case of incompatibility. * If updateAllTypes is true, only basic compatibility is checked. */ - public void checkCompatibility(String type, Collection fieldMappers, boolean updateAllTypes) { - for (FieldMapper fieldMapper : fieldMappers) { - MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName()); - if (ref != null) { - List conflicts = new ArrayList<>(); - final Set types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName()); - boolean strict = beStrict(type, types, updateAllTypes); - ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict); - if (conflicts.isEmpty() == false) { - throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString()); - } - } - - // field type for the index name must be compatible too - MappedFieldTypeReference indexNameRef = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName()); - if (indexNameRef != null) { - List conflicts = new ArrayList<>(); - final Set types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName()); - boolean strict = beStrict(type, types, updateAllTypes); - indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict); - if (conflicts.isEmpty() == false) { - throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString()); - } + private void checkCompatibility(String type, FieldMapper fieldMapper, boolean updateAllTypes) { + MappedFieldType fieldType = fullNameToFieldType.get(fieldMapper.fieldType().name()); + if (fieldType != null) { + List conflicts = new ArrayList<>(); + final Set types = fullNameToTypes.get(fieldMapper.fieldType().name()); + boolean strict = beStrict(type, types, updateAllTypes); + fieldType.checkCompatibility(fieldMapper.fieldType(), conflicts, strict); + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().name() + "] conflicts with existing mapping in other types:\n" + conflicts.toString()); } } } /** Returns the field for the given field */ public MappedFieldType get(String field) { - MappedFieldTypeReference ref = 
fullNameToFieldType.get(field); - if (ref == null) return null; - return ref.get(); + return fullNameToFieldType.get(field); } /** Get the set of types that have a mapping for the given field. */ @@ -192,53 +146,23 @@ class FieldTypeLookup implements Iterable { return types; } - /** Returns the field type for the given index name */ - public MappedFieldType getByIndexName(String field) { - MappedFieldTypeReference ref = indexNameToFieldType.get(field); - if (ref == null) return null; - return ref.get(); - } - - /** Get the set of types that have a mapping for the given field. */ - public Set getTypesByIndexName(String field) { - Set types = indexNameToTypes.get(field); - if (types == null) { - types = Collections.emptySet(); - } - return types; - } - - /** - * Returns a list of the index names of a simple match regex like pattern against full name and index name. - */ - public Collection simpleMatchToIndexNames(String pattern) { - Set fields = new HashSet<>(); - for (MappedFieldType fieldType : this) { - if (Regex.simpleMatch(pattern, fieldType.names().fullName())) { - fields.add(fieldType.names().indexName()); - } else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) { - fields.add(fieldType.names().indexName()); - } - } - return fields; - } - /** * Returns a list of the full names of a simple match regex like pattern against full name and index name. */ public Collection simpleMatchToFullName(String pattern) { Set fields = new HashSet<>(); for (MappedFieldType fieldType : this) { - if (Regex.simpleMatch(pattern, fieldType.names().fullName())) { - fields.add(fieldType.names().fullName()); - } else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) { - fields.add(fieldType.names().fullName()); + if (Regex.simpleMatch(pattern, fieldType.name())) { + fields.add(fieldType.name()); + } else if (Regex.simpleMatch(pattern, fieldType.name())) { + fields.add(fieldType.name()); } } return fields; } + @Override public Iterator iterator() { - return fullNameToFieldType.values().stream().map((p) -> p.get()).iterator(); + return fullNameToFieldType.values().iterator(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 32e749992e6..5f8049b55fb 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -24,7 +24,14 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Nullable; @@ -46,68 +53,6 @@ import java.util.Objects; */ public abstract class MappedFieldType extends FieldType { - public static class Names { - - private final String indexName; - - private final String originalIndexName; - - private final String fullName; - - public Names(String name) { - this(name, name, name); - } - - public Names(String indexName, 
String originalIndexName, String fullName) { - this.indexName = indexName; - this.originalIndexName = originalIndexName; - this.fullName = fullName; - } - - /** - * The indexed name of the field. This is the name under which we will - * store it in the index. - */ - public String indexName() { - return indexName; - } - - /** - * The original index name, before any "path" modifications performed on it. - */ - public String originalIndexName() { - return originalIndexName; - } - - /** - * The full name, including dot path. - */ - public String fullName() { - return fullName; - } - - @Override - public boolean equals(Object o) { - if (o == null || getClass() != o.getClass()) return false; - - Names names = (Names) o; - - if (!fullName.equals(names.fullName)) return false; - if (!indexName.equals(names.indexName)) return false; - if (!originalIndexName.equals(names.originalIndexName)) return false; - - return true; - } - - @Override - public int hashCode() { - int result = indexName.hashCode(); - result = 31 * result + originalIndexName.hashCode(); - result = 31 * result + fullName.hashCode(); - return result; - } - } - public enum Loading { LAZY { @Override @@ -148,7 +93,7 @@ public abstract class MappedFieldType extends FieldType { } } - private Names names; + private String name; private float boost; // TODO: remove this docvalues flag and use docValuesType private boolean docValues; @@ -163,7 +108,7 @@ public abstract class MappedFieldType extends FieldType { protected MappedFieldType(MappedFieldType ref) { super(ref); - this.names = ref.names(); + this.name = ref.name(); this.boost = ref.boost(); this.docValues = ref.hasDocValues(); this.indexAnalyzer = ref.indexAnalyzer(); @@ -207,7 +152,7 @@ public abstract class MappedFieldType extends FieldType { return boost == fieldType.boost && docValues == fieldType.docValues && - Objects.equals(names, fieldType.names) && + Objects.equals(name, fieldType.name) && Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) && Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) && Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) && @@ -219,7 +164,7 @@ public abstract class MappedFieldType extends FieldType { @Override public int hashCode() { - return Objects.hash(super.hashCode(), names, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer, + return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer, similarity == null ? null : similarity.name(), normsLoading, fieldDataType, nullValue, nullValueAsString); } @@ -231,7 +176,7 @@ public abstract class MappedFieldType extends FieldType { /** Checks this type is the same type as other. Adds a conflict if they are different. 
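     * (In practice a mismatch fails hard with an {@code IllegalArgumentException}
     * rather than being collected as a conflict.)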
*/ private final void checkTypeName(MappedFieldType other) { if (typeName().equals(other.typeName()) == false) { - throw new IllegalArgumentException("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]"); + throw new IllegalArgumentException("mapper [" + name + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]"); } else if (getClass() != other.getClass()) { throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName()); } @@ -249,71 +194,68 @@ public abstract class MappedFieldType extends FieldType { boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE; // TODO: should be validating if index options go "up" (but "down" is ok) if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) { - conflicts.add("mapper [" + names().fullName() + "] has different [index] values"); + conflicts.add("mapper [" + name() + "] has different [index] values"); } if (stored() != other.stored()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store] values"); + conflicts.add("mapper [" + name() + "] has different [store] values"); } if (hasDocValues() == false && other.hasDocValues()) { // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set // when the doc_values field data format is configured - conflicts.add("mapper [" + names().fullName() + "] has different [doc_values] values, cannot change from disabled to enabled"); + conflicts.add("mapper [" + name() + "] has different [doc_values] values, cannot change from disabled to enabled"); } if (omitNorms() && !other.omitNorms()) { - conflicts.add("mapper [" + names().fullName() + "] has different [omit_norms] values, cannot change from disable to enabled"); + conflicts.add("mapper [" + name() + "] has different [omit_norms] values, cannot change from disable to enabled"); } if (storeTermVectors() != other.storeTermVectors()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector] values"); + conflicts.add("mapper [" + name() + "] has different [store_term_vector] values"); } if (storeTermVectorOffsets() != other.storeTermVectorOffsets()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_offsets] values"); + conflicts.add("mapper [" + name() + "] has different [store_term_vector_offsets] values"); } if (storeTermVectorPositions() != other.storeTermVectorPositions()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_positions] values"); + conflicts.add("mapper [" + name() + "] has different [store_term_vector_positions] values"); } if (storeTermVectorPayloads() != other.storeTermVectorPayloads()) { - conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_payloads] values"); + conflicts.add("mapper [" + name() + "] has different [store_term_vector_payloads] values"); } // null and "default"-named index analyzers both mean the default is used if (indexAnalyzer() == null || "default".equals(indexAnalyzer().name())) { if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]"); + conflicts.add("mapper [" + name() + "] has different [analyzer]"); } } else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) { 
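+            // this mapper has an explicit, non-default analyzer while the incoming one relies on the default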
- conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]"); + conflicts.add("mapper [" + name() + "] has different [analyzer]"); } else if (indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]"); + conflicts.add("mapper [" + name() + "] has different [analyzer]"); } - if (!names().indexName().equals(other.names().indexName())) { - conflicts.add("mapper [" + names().fullName() + "] has different [index_name]"); - } if (Objects.equals(similarity(), other.similarity()) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [similarity]"); + conflicts.add("mapper [" + name() + "] has different [similarity]"); } if (strict) { if (omitNorms() != other.omitNorms()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types."); } if (boost() != other.boost()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types."); } if (normsLoading() != other.normsLoading()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types."); } if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types."); } if (Objects.equals(searchQuoteAnalyzer(), other.searchQuoteAnalyzer()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types."); } if (Objects.equals(fieldDataType(), other.fieldDataType()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types."); } if (Objects.equals(nullValue(), other.nullValue()) == false) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [null_value] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. 
Set update_all_types to true to update [null_value] across all types."); } } } @@ -326,13 +268,13 @@ public abstract class MappedFieldType extends FieldType { return true; } - public Names names() { - return names; + public String name() { + return name; } - public void setNames(Names names) { + public void setName(String name) { checkIfFrozen(); - this.names = names; + this.name = name; } public float boost() { @@ -449,7 +391,7 @@ public abstract class MappedFieldType extends FieldType { /** Creates a term associated with the field of this mapper for the given value */ protected Term createTerm(Object value) { - return new Term(names().indexName(), indexedValueForSearch(value)); + return new Term(name(), indexedValueForSearch(value)); } public Query termQuery(Object value, @Nullable QueryShardContext context) { @@ -461,11 +403,11 @@ public abstract class MappedFieldType extends FieldType { for (int i = 0; i < bytesRefs.length; i++) { bytesRefs[i] = indexedValueForSearch(values.get(i)); } - return new TermsQuery(names.indexName(), bytesRefs); + return new TermsQuery(name(), bytesRefs); } public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return new TermRangeQuery(names().indexName(), + return new TermRangeQuery(name(), lowerTerm == null ? null : indexedValueForSearch(lowerTerm), upperTerm == null ? null : indexedValueForSearch(upperTerm), includeLower, includeUpper); @@ -485,7 +427,7 @@ public abstract class MappedFieldType extends FieldType { public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { if (numericType() != null) { - throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + names.fullName + "]"); + throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + name + "]"); } RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 33a4dabd3be..ffdae90c436 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -174,5 +174,14 @@ public abstract class Mapper implements ToXContent, Iterable { /** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. */ public abstract String name(); - public abstract void merge(Mapper mergeWith, MergeResult mergeResult); + /** Return the merge of {@code mergeWith} into this. + * Both {@code this} and {@code mergeWith} will be left unmodified. */ + public abstract Mapper merge(Mapper mergeWith, boolean updateAllTypes); + + /** + * Update the field type of this mapper. This is necessary because some mapping updates + * can modify mappings across several types. This method must return a copy of the mapper + * so that the current mapper is not modified. 
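+     * Implementations return {@code this} when nothing changed, which lets callers
+     * detect no-op updates by reference equality, for example:
+     * <pre>
+     * Mapper updated = mapper.updateFieldType(fullNameToFieldType);
+     * if (updated != mapper) {
+     *     // the field type changed, publish the updated mapper
+     * }
+     * </pre>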
+ */ + public abstract Mapper updateFieldType(Map fullNameToFieldType); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java index 0df3c06d042..9ea9e99f01b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperBuilders.java @@ -19,8 +19,18 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.core.*; +import org.elasticsearch.index.mapper.core.BinaryFieldMapper; +import org.elasticsearch.index.mapper.core.BooleanFieldMapper; +import org.elasticsearch.index.mapper.core.ByteFieldMapper; +import org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import org.elasticsearch.index.mapper.core.FloatFieldMapper; +import org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.ShortFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.ip.IpFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; @@ -30,8 +40,8 @@ public final class MapperBuilders { private MapperBuilders() {} - public static DocumentMapper.Builder doc(Settings settings, RootObjectMapper.Builder objectBuilder, MapperService mapperService) { - return new DocumentMapper.Builder(settings, objectBuilder, mapperService); + public static DocumentMapper.Builder doc(RootObjectMapper.Builder objectBuilder, MapperService mapperService) { + return new DocumentMapper.Builder(objectBuilder, mapperService); } public static RootObjectMapper.Builder rootObject(String name) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 938f610d6db..3f76245aa8f 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -20,24 +20,24 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectHashSet; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; -import 
org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; @@ -53,13 +53,23 @@ import org.elasticsearch.script.ScriptService; import java.io.Closeable; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Function; import java.util.stream.Collectors; -import static java.util.Collections.*; +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static java.util.Collections.unmodifiableMap; +import static java.util.Collections.unmodifiableSet; import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; /** @@ -68,6 +78,8 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; public class MapperService extends AbstractIndexComponent implements Closeable { public static final String DEFAULT_MAPPING = "_default_"; + public static final String INDEX_MAPPER_DYNAMIC_SETTING = "index.mapper.dynamic"; + public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" @@ -85,14 +97,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable { private volatile Map mappers = emptyMap(); - // A lock for mappings: modifications (put mapping) need to be performed - // under the write lock and read operations (document parsing) need to be - // performed under the read lock - final ReentrantReadWriteLock mappingLock = new ReentrantReadWriteLock(); - private final ReleasableLock mappingWriteLock = new ReleasableLock(mappingLock.writeLock()); - private volatile FieldTypeLookup fieldTypes; - private volatile ImmutableOpenMap fullPathObjectMappers = ImmutableOpenMap.of(); + private volatile Map fullPathObjectMappers = new HashMap<>(); private boolean hasNested = false; // updated dynamically to true when a nested object is added private final DocumentMapperParser documentParser; @@ -120,7 +126,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer()); this.mapperRegistry = mapperRegistry; - this.dynamic = this.indexSettings.getSettings().getAsBoolean("index.mapper.dynamic", true); + this.dynamic = this.indexSettings.getSettings().getAsBoolean(INDEX_MAPPER_DYNAMIC_SETTING, INDEX_MAPPER_DYNAMIC_DEFAULT); defaultPercolatorMappingSource = "{\n" + "\"_default_\":{\n" + "\"properties\" : {\n" + @@ -199,10 +205,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable { public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault, boolean updateAllTypes) { if (DEFAULT_MAPPING.equals(type)) { // verify we can parse it - DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource); + // NOTE: never apply the default here + DocumentMapper mapper = documentParser.parse(type, mappingSource); // still add it as 
a document mapper so we have it registered and, for example, persisted back into // the cluster meta data if needed, or checked for existence - try (ReleasableLock lock = mappingWriteLock.acquire()) { + synchronized (this) { mappers = newMapBuilder(mappers).put(type, mapper).map(); } try { @@ -212,75 +219,119 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } return mapper; } else { - return merge(parse(type, mappingSource, applyDefault), updateAllTypes); + synchronized (this) { + // only apply the default mapping if we don't have the type yet + applyDefault &= mappers.containsKey(type) == false; + return merge(parse(type, mappingSource, applyDefault), updateAllTypes); + } } } - // never expose this to the outside world, we need to reparse the doc mapper so we get fresh - // instances of field mappers to properly remove existing doc mapper - private DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) { - try (ReleasableLock lock = mappingWriteLock.acquire()) { - if (mapper.type().length() == 0) { - throw new InvalidTypeNameException("mapping type name is empty"); - } - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1) && mapper.type().length() > 255) { - throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]"); - } - if (mapper.type().charAt(0) == '_') { - throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] can't start with '_'"); - } - if (mapper.type().contains("#")) { - throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include '#' in it"); - } - if (mapper.type().contains(",")) { - throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it"); - } - if (mapper.type().equals(mapper.parentFieldMapper().type())) { - throw new IllegalArgumentException("The [_parent.type] option can't point to the same type"); - } - if (typeNameStartsWithIllegalDot(mapper)) { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'"); - } else { - logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'", mapper.type()); - } - } - // we can add new field/object mappers while the old ones are there - // since we get new instances of those, and when we remove, we remove - // by instance equality - DocumentMapper oldMapper = mappers.get(mapper.type()); + private synchronized DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) { + if (mapper.type().length() == 0) { + throw new InvalidTypeNameException("mapping type name is empty"); + } + if (mapper.type().length() > 255) { + throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]"); + } + if (mapper.type().charAt(0) == '_') { + throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] can't start with '_'"); + } + if (mapper.type().contains("#")) { + throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include '#' in it"); + } + if (mapper.type().contains(",")) { + throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it"); + } + if (mapper.type().equals(mapper.parentFieldMapper().type())) { + throw 
new IllegalArgumentException("The [_parent.type] option can't point to the same type"); + } + if (typeNameStartsWithIllegalDot(mapper)) { + throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'"); + } - if (oldMapper != null) { - // simulate first - MergeResult result = oldMapper.merge(mapper.mapping(), true, updateAllTypes); - if (result.hasConflicts()) { - throw new IllegalArgumentException("Merge failed with failures {" + Arrays.toString(result.buildConflicts()) + "}"); - } - // then apply for real - result = oldMapper.merge(mapper.mapping(), false, updateAllTypes); - assert result.hasConflicts() == false; // we already simulated - return oldMapper; - } else { - Tuple, Collection> newMappers = checkMappersCompatibility( - mapper.type(), mapper.mapping(), updateAllTypes); - Collection newObjectMappers = newMappers.v1(); - Collection newFieldMappers = newMappers.v2(); - addMappers(mapper.type(), newObjectMappers, newFieldMappers); + // 1. compute the merged DocumentMapper + DocumentMapper oldMapper = mappers.get(mapper.type()); + DocumentMapper newMapper; + if (oldMapper != null) { + newMapper = oldMapper.merge(mapper.mapping(), updateAllTypes); + } else { + newMapper = mapper; + } - for (DocumentTypeListener typeListener : typeListeners) { - typeListener.beforeCreate(mapper); - } - mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map(); - if (mapper.parentFieldMapper().active()) { - Set newParentTypes = new HashSet<>(parentTypes.size() + 1); - newParentTypes.addAll(parentTypes); - newParentTypes.add(mapper.parentFieldMapper().type()); - parentTypes = unmodifiableSet(newParentTypes); - } - assert assertSerialization(mapper); - return mapper; + // 2. check basic sanity of the new mapping + List objectMappers = new ArrayList<>(); + List fieldMappers = new ArrayList<>(); + Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers); + MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers); + checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers); + checkObjectsCompatibility(newMapper.type(), objectMappers, fieldMappers, updateAllTypes); + + // 3. update lookup data-structures + // this will in particular make sure that the merged fields are compatible with other types + FieldTypeLookup fieldTypes = this.fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, updateAllTypes); + + boolean hasNested = this.hasNested; + Map fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers); + for (ObjectMapper objectMapper : objectMappers) { + fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper); + if (objectMapper.nested().isNested()) { + hasNested = true; } } + fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); + Set parentTypes = this.parentTypes; + if (oldMapper == null && newMapper.parentFieldMapper().active()) { + parentTypes = new HashSet<>(parentTypes.size() + 1); + parentTypes.addAll(this.parentTypes); + parentTypes.add(mapper.parentFieldMapper().type()); + parentTypes = Collections.unmodifiableSet(parentTypes); + } + + Map mappers = new HashMap<>(this.mappers); + mappers.put(newMapper.type(), newMapper); + for (Map.Entry entry : mappers.entrySet()) { + if (entry.getKey().equals(DEFAULT_MAPPING)) { + continue; + } + DocumentMapper m = entry.getValue(); + // apply changes to the field types back + m = m.updateFieldType(fieldTypes.fullNameToFieldType); + entry.setValue(m); + } + mappers = Collections.unmodifiableMap(mappers); + + // 4. 
commit the change + this.mappers = mappers; + this.fieldTypes = fieldTypes; + this.hasNested = hasNested; + this.fullPathObjectMappers = fullPathObjectMappers; + this.parentTypes = parentTypes; + + // 5. send notifications about the change + if (oldMapper == null) { + // means the mapping was created + for (DocumentTypeListener typeListener : typeListeners) { + typeListener.beforeCreate(mapper); + } + } + + assert assertSerialization(newMapper); + assert assertMappersShareSameFieldType(); + + return newMapper; + } + + private boolean assertMappersShareSameFieldType() { + for (DocumentMapper mapper : docMappers(false)) { + List fieldMappers = new ArrayList<>(); + Collections.addAll(fieldMappers, mapper.mapping().metadataMappers); + MapperUtils.collect(mapper.root(), new ArrayList(), fieldMappers); + for (FieldMapper fieldMapper : fieldMappers) { + assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name(); + } + } + return true; } private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) { @@ -300,45 +351,45 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return true; } - protected void checkMappersCompatibility(String type, Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { - assert mappingLock.isWriteLockedByCurrentThread(); + private void checkFieldUniqueness(String type, Collection objectMappers, Collection fieldMappers) { + final Set objectFullNames = new HashSet<>(); + for (ObjectMapper objectMapper : objectMappers) { + final String fullPath = objectMapper.fullPath(); + if (objectFullNames.add(fullPath) == false) { + throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); + } + } + + final Set fieldNames = new HashSet<>(); + for (FieldMapper fieldMapper : fieldMappers) { + final String name = fieldMapper.name(); + if (objectFullNames.contains(name)) { + throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]"); + } else if (fieldNames.add(name) == false) { + throw new IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]"); + } + } + } + + private void checkObjectsCompatibility(String type, Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { + assert Thread.holdsLock(this); + + checkFieldUniqueness(type, objectMappers, fieldMappers); + for (ObjectMapper newObjectMapper : objectMappers) { ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); if (existingObjectMapper != null) { - MergeResult result = new MergeResult(true, updateAllTypes); - existingObjectMapper.merge(newObjectMapper, result); - if (result.hasConflicts()) { - throw new IllegalArgumentException("Mapper for [" + newObjectMapper.fullPath() + "] conflicts with existing mapping in other types" + - Arrays.toString(result.buildConflicts())); - } + // simulate a merge and ignore the result, we are just interested + // in exceptions here + existingObjectMapper.merge(newObjectMapper, updateAllTypes); } } - fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes); - } - protected Tuple, Collection> checkMappersCompatibility( - String type, Mapping mapping, boolean updateAllTypes) { - List objectMappers = new ArrayList<>(); - List fieldMappers = new ArrayList<>(); - for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) { - fieldMappers.add(metadataMapper); - } - MapperUtils.collect(mapping.root, 
objectMappers, fieldMappers); - checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes); - return new Tuple<>(objectMappers, fieldMappers); - } - - protected void addMappers(String type, Collection objectMappers, Collection fieldMappers) { - assert mappingLock.isWriteLockedByCurrentThread(); - ImmutableOpenMap.Builder fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers); - for (ObjectMapper objectMapper : objectMappers) { - fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper); - if (objectMapper.nested().isNested()) { - hasNested = true; + for (FieldMapper fieldMapper : fieldMappers) { + if (fullPathObjectMappers.containsKey(fieldMapper.name())) { + throw new IllegalArgumentException("Field [" + fieldMapper.name() + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types"); } } - this.fullPathObjectMappers = fullPathObjectMappers.build(); - this.fieldTypes = this.fieldTypes.copyAndAddAll(type, fieldMappers); } public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { @@ -348,7 +399,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } else { defaultMappingSource = this.defaultMappingSource; } - return documentParser.parseCompressed(mappingType, mappingSource, applyDefault ? defaultMappingSource : null); + return documentParser.parse(mappingType, mappingSource, applyDefault ? defaultMappingSource : null); } public boolean hasMapping(String mappingType) { @@ -488,15 +539,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } - /** - * Returns an {@link MappedFieldType} which has the given index name. - * - * If multiple types have fields with the same index name, the first is returned. - */ - public MappedFieldType indexName(String indexName) { - return fieldTypes.getByIndexName(indexName); - } - /** * Returns the {@link MappedFieldType} for the give fullName. * @@ -515,32 +557,13 @@ public class MapperService extends AbstractIndexComponent implements Closeable { // no wildcards return Collections.singletonList(pattern); } - return fieldTypes.simpleMatchToIndexNames(pattern); + return fieldTypes.simpleMatchToFullName(pattern); } - // TODO: remove this since the underlying index names are now the same across all types - public Collection simpleMatchToIndexNames(String pattern, @Nullable String[] types) { - return simpleMatchToIndexNames(pattern); - } - - // TODO: remove types param, since the object mapper must be the same across all types - public ObjectMapper getObjectMapper(String name, @Nullable String[] types) { + public ObjectMapper getObjectMapper(String name) { return fullPathObjectMappers.get(name); } - public MappedFieldType smartNameFieldType(String smartName) { - MappedFieldType fieldType = fullName(smartName); - if (fieldType != null) { - return fieldType; - } - return indexName(smartName); - } - - // TODO: remove this since the underlying index names are now the same across all types - public MappedFieldType smartNameFieldType(String smartName, @Nullable String[] types) { - return smartNameFieldType(smartName); - } - /** * Given a type (eg. long, string, ...), return an anonymous field mapper that can be used for search operations. 
     */
@@ -634,7 +657,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         @Override
         protected Analyzer getWrappedAnalyzer(String fieldName) {
-            MappedFieldType fieldType = smartNameFieldType(fieldName);
+            MappedFieldType fieldType = fullName(fieldName);
             if (fieldType != null) {
                 Analyzer analyzer = extractAnalyzer.apply(fieldType);
                 if (analyzer != null) {
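The new checkFieldUniqueness guard above rejects a mapping that reuses the same dotted path for two fields, or for both an object and a leaf field. A minimal standalone illustration of that set-based check (hypothetical helper, not the MapperService API):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

class FieldUniqueness {
    // Throws if a name is used twice, mirroring the duplicate-field check above.
    static void check(List<String> objectPaths, List<String> fieldNames) {
        Set<String> objects = new HashSet<>();
        for (String path : objectPaths) {
            if (objects.add(path) == false) {
                throw new IllegalArgumentException("Object mapper [" + path + "] is defined twice");
            }
        }
        Set<String> fields = new HashSet<>();
        for (String name : fieldNames) {
            if (objects.contains(name)) {
                throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field");
            } else if (fields.add(name) == false) {
                throw new IllegalArgumentException("Field [" + name + "] is defined twice");
            }
        }
    }
}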
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java
index d46c32a932b..04508827f77 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java
@@ -27,52 +27,6 @@ import java.util.Collection;
 public enum MapperUtils {
     ;

-    private static MergeResult newStrictMergeResult() {
-        return new MergeResult(false, false) {
-
-            @Override
-            public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
-                // no-op
-            }
-
-            @Override
-            public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
-                // no-op
-            }
-
-            @Override
-            public Collection<FieldMapper> getNewFieldMappers() {
-                throw new UnsupportedOperationException("Strict merge result does not support new field mappers");
-            }
-
-            @Override
-            public Collection<ObjectMapper> getNewObjectMappers() {
-                throw new UnsupportedOperationException("Strict merge result does not support new object mappers");
-            }
-
-            @Override
-            public void addConflict(String mergeFailure) {
-                throw new MapperParsingException("Merging dynamic updates triggered a conflict: " + mergeFailure);
-            }
-        };
-    }
-
-    /**
-     * Merge {@code mergeWith} into {@code mergeTo}. Note: this method only
-     * merges mappings, not lookup structures. Conflicts are returned as exceptions.
-     */
-    public static void merge(Mapper mergeInto, Mapper mergeWith) {
-        mergeInto.merge(mergeWith, newStrictMergeResult());
-    }
-
-    /**
-     * Merge {@code mergeWith} into {@code mergeTo}. Note: this method only
-     * merges mappings, not lookup structures. Conflicts are returned as exceptions.
-     */
-    public static void merge(Mapping mergeInto, Mapping mergeWith) {
-        mergeInto.merge(mergeWith, newStrictMergeResult());
-    }
-
     /** Split mapper and its descendants into object and field mappers. */
     public static void collect(Mapper mapper, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
         if (mapper instanceof RootObjectMapper) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java
index bac42162552..6f2fea6a3d4 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapping.java
@@ -29,7 +29,6 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;

 import static java.util.Collections.emptyMap;
@@ -41,25 +40,20 @@ import static java.util.Collections.unmodifiableMap;
  */
 public final class Mapping implements ToXContent {

-    public static final List<String> LEGACY_INCLUDE_IN_OBJECT = Arrays.asList("_all", "_id", "_parent", "_routing", "_timestamp", "_ttl");
-
     final Version indexCreated;
     final RootObjectMapper root;
     final MetadataFieldMapper[] metadataMappers;
     final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap;
-    volatile Map<String, Object> meta;
+    final Map<String, Object> meta;

     public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper, MetadataFieldMapper[] metadataMappers, Map<String, Object> meta) {
         this.indexCreated = indexCreated;
-        this.root = rootObjectMapper;
         this.metadataMappers = metadataMappers;
         Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();
         for (MetadataFieldMapper metadataMapper : metadataMappers) {
-            if (indexCreated.before(Version.V_2_0_0_beta1) && LEGACY_INCLUDE_IN_OBJECT.contains(metadataMapper.name())) {
-                root.putMapper(metadataMapper);
-            }
             metadataMappersMap.put(metadataMapper.getClass(), metadataMapper);
         }
+        this.root = rootObjectMapper;
         // keep root mappers sorted for consistent serialization
         Arrays.sort(metadataMappers, new Comparator<MetadataFieldMapper>() {
             @Override
@@ -89,22 +83,33 @@ public final class Mapping implements ToXContent {
         return (T) metadataMappersMap.get(clazz);
     }

-    /** @see DocumentMapper#merge(Mapping, boolean, boolean) */
-    public void merge(Mapping mergeWith, MergeResult mergeResult) {
-        assert metadataMappers.length == mergeWith.metadataMappers.length;
-
-        root.merge(mergeWith.root, mergeResult);
-        for (MetadataFieldMapper metadataMapper : metadataMappers) {
-            MetadataFieldMapper mergeWithMetadataMapper = mergeWith.metadataMapper(metadataMapper.getClass());
-            if (mergeWithMetadataMapper != null) {
-                metadataMapper.merge(mergeWithMetadataMapper, mergeResult);
+    /** @see DocumentMapper#merge(Mapping, boolean) */
+    public Mapping merge(Mapping mergeWith, boolean updateAllTypes) {
+        RootObjectMapper mergedRoot = root.merge(mergeWith.root, updateAllTypes);
+        Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> mergedMetaDataMappers = new HashMap<>(metadataMappersMap);
+        for (MetadataFieldMapper metaMergeWith : mergeWith.metadataMappers) {
+            MetadataFieldMapper mergeInto = mergedMetaDataMappers.get(metaMergeWith.getClass());
+            MetadataFieldMapper merged;
+            if (mergeInto == null) {
+                merged = metaMergeWith;
+            } else {
+                merged = mergeInto.merge(metaMergeWith, updateAllTypes);
             }
+            mergedMetaDataMappers.put(merged.getClass(), merged);
         }
+        return new Mapping(indexCreated, mergedRoot, mergedMetaDataMappers.values().toArray(new MetadataFieldMapper[0]), mergeWith.meta);
+    }

-        if (mergeResult.simulate() == false) {
-            // let the merge with attributes to override the attributes
-            meta = mergeWith.meta;
+    /**
+     * Recursively update sub field types.
+     */
+    public Mapping updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
+        final MetadataFieldMapper[] updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length);
+        for (int i = 0; i < updatedMeta.length; ++i) {
+            updatedMeta[i] = (MetadataFieldMapper) updatedMeta[i].updateFieldType(fullNameToFieldType);
         }
+        RootObjectMapper updatedRoot = root.updateFieldType(fullNameToFieldType);
+        return new Mapping(indexCreated, updatedRoot, updatedMeta, meta);
     }

     @Override
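Note that Mapping.merge now builds and returns a merged copy rather than mutating the receiver, which is why MergeResult can be deleted below. A rough sketch of the calling convention under that immutable model (stand-in types, not the real mapper classes):

// Sketch only: a stand-in for the immutable merge contract.
interface ImmutableMapping {
    ImmutableMapping merge(ImmutableMapping mergeWith, boolean updateAllTypes); // returns a new object
}

final class MappingHolder {
    private volatile ImmutableMapping current;

    MappingHolder(ImmutableMapping initial) {
        this.current = initial;
    }

    // Writers synchronize; readers just read the volatile field. Because merge()
    // returns a fresh object instead of mutating, readers never see a half-merged mapping.
    synchronized void apply(ImmutableMapping incoming, boolean updateAllTypes) {
        current = current.merge(incoming, updateAllTypes);
    }

    ImmutableMapping current() {
        return current;
    }
}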
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MergeResult.java b/core/src/main/java/org/elasticsearch/index/mapper/MergeResult.java
deleted file mode 100644
index f5698a0ed18..00000000000
--- a/core/src/main/java/org/elasticsearch/index/mapper/MergeResult.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper;
-
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.index.mapper.object.ObjectMapper;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-/** A container for tracking results of a mapping merge. */
-public class MergeResult {
-
-    private final boolean simulate;
-    private final boolean updateAllTypes;
-
-    private final List<String> conflicts = new ArrayList<>();
-    private final List<FieldMapper> newFieldMappers = new ArrayList<>();
-    private final List<ObjectMapper> newObjectMappers = new ArrayList<>();
-
-    public MergeResult(boolean simulate, boolean updateAllTypes) {
-        this.simulate = simulate;
-        this.updateAllTypes = updateAllTypes;
-    }
-
-    public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
-        assert simulate() == false;
-        newFieldMappers.addAll(fieldMappers);
-    }
-
-    public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
-        assert simulate() == false;
-        newObjectMappers.addAll(objectMappers);
-    }
-
-    public Collection<FieldMapper> getNewFieldMappers() {
-        return newFieldMappers;
-    }
-
-    public Collection<ObjectMapper> getNewObjectMappers() {
-        return newObjectMappers;
-    }
-
-    public boolean simulate() {
-        return simulate;
-    }
-
-    public boolean updateAllTypes() {
-        return updateAllTypes;
-    }
-
-    public void addConflict(String mergeFailure) {
-        conflicts.add(mergeFailure);
-    }
-
-    public boolean hasConflicts() {
-        return conflicts.isEmpty() == false;
-    }
-
-    public String[] buildConflicts() {
-        return conflicts.toArray(Strings.EMPTY_ARRAY);
-    }
-}
\ No newline at end of file
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java
index fc6d1fa9e1a..622c7729dd4 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java
@@ -51,8 +51,8 @@ public abstract class MetadataFieldMapper extends FieldMapper {
     }

     public abstract static class Builder<T extends Builder, Y extends MetadataFieldMapper> extends FieldMapper.Builder<T, Y> {
-        public Builder(String name, MappedFieldType fieldType) {
-            super(name, fieldType);
+        public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) {
+            super(name, fieldType, defaultFieldType);
         }
     }

@@ -70,4 +70,8 @@ public abstract class MetadataFieldMapper extends FieldMapper {
      */
     public abstract void postParse(ParseContext context) throws IOException;

+    @Override
+    public MetadataFieldMapper merge(Mapper mergeWith, boolean updateAllTypes) {
+        return (MetadataFieldMapper) super.merge(mergeWith, updateAllTypes);
+    }
 }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
index edf75621c1e..3c12f51a7f7 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
@@ -34,10 +34,8 @@ import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.mapper.object.RootObjectMapper;

 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;

 /**
  *
@@ -288,16 +286,6 @@ public abstract class ParseContext {
             return in.id();
         }

-        @Override
-        public void ignoredValue(String indexName, String value) {
-            in.ignoredValue(indexName, value);
-        }
-
-        @Override
-        public String ignoredValue(String indexName) {
-            return in.ignoredValue(indexName);
-        }
-
         @Override
         public void id(String id) {
             in.id(id);
@@ -390,8 +378,6 @@ private StringBuilder stringBuilder = new StringBuilder();

-        private Map<String, String> ignoredValues = new HashMap<>();
-
         private AllEntries allEntries = new AllEntries();

         private float docBoost = 1.0f;

@@ -421,7 +407,6 @@ public abstract class ParseContext {
             this.source = source == null ? null : sourceToParse.source();
             this.path.reset();
             this.allEntries = new AllEntries();
-            this.ignoredValues.clear();
             this.docBoost = 1.0f;
             this.dynamicMappingsUpdate = null;
         }
@@ -523,16 +508,6 @@ public abstract class ParseContext {
             return id;
         }

-        @Override
-        public void ignoredValue(String indexName, String value) {
-            ignoredValues.put(indexName, value);
-        }
-
-        @Override
-        public String ignoredValue(String indexName) {
-            return ignoredValues.get(indexName);
-        }
-
         /**
          * Really, just the id mapper should set this.
         */
@@ -595,7 +570,7 @@
             if (dynamicMappingsUpdate == null) {
                 dynamicMappingsUpdate = mapper;
             } else {
-                MapperUtils.merge(dynamicMappingsUpdate, mapper);
+                dynamicMappingsUpdate = dynamicMappingsUpdate.merge(mapper, false);
             }
         }

@@ -710,10 +685,6 @@ public abstract String id();

-    public abstract void ignoredValue(String indexName, String value);
-
-    public abstract String ignoredValue(String indexName);
-
     /**
      * Really, just the id mapper should set this.
     */
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java b/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java
index ed8314c6f7d..aa35e699b2d 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java
@@ -128,7 +128,7 @@ public class ParsedDocument {
         if (dynamicMappingsUpdate == null) {
             dynamicMappingsUpdate = update;
         } else {
-            MapperUtils.merge(dynamicMappingsUpdate, update);
+            dynamicMappingsUpdate = dynamicMappingsUpdate.merge(update, false);
         }
     }
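Both ParseContext and ParsedDocument now fold successive dynamic updates into a single mapping by reassigning the result of merge(). A compact, self-contained illustration of that accumulate-by-merge pattern (Update is a stand-in type, not an Elasticsearch class):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

// Sketch of the accumulate-by-merge pattern used above.
final class Update {
    final Set<String> fields;

    Update(String... fields) {
        this.fields = new LinkedHashSet<>(Arrays.asList(fields));
    }

    // merge() returns a new object, mirroring Mapper.merge(other, false)
    Update merge(Update other) {
        Update merged = new Update();
        merged.fields.addAll(this.fields);
        merged.fields.addAll(other.fields);
        return merged;
    }

    public static void main(String[] args) {
        Update pending = null;
        for (Update u : Arrays.asList(new Update("a"), new Update("a", "b"))) {
            pending = pending == null ? u : pending.merge(u); // first update wins, later ones merge in
        }
        System.out.println(pending.fields); // prints [a, b]
    }
}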
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
index 7468f4fb2f6..f71267fa75b 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java
@@ -26,12 +26,9 @@ import org.apache.lucene.store.ByteArrayDataOutput;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Base64;
-import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.compress.CompressorFactory;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -42,10 +39,8 @@ import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;

 import java.io.IOException;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;

 import static org.elasticsearch.index.mapper.MapperBuilders.binaryField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
@@ -56,9 +51,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
 public class BinaryFieldMapper extends FieldMapper {

     public static final String CONTENT_TYPE = "binary";
-    private static final ParseField COMPRESS = new ParseField("compress").withAllDeprecated("no replacement, implemented at the codec level");
-    private static final ParseField COMPRESS_THRESHOLD = new ParseField("compress_threshold").withAllDeprecated("no replacement");
-
     public static class Defaults {
         public static final MappedFieldType FIELD_TYPE = new BinaryFieldType();
@@ -72,7 +64,7 @@ public class BinaryFieldMapper extends FieldMapper {
     public static class Builder extends FieldMapper.Builder<Builder, BinaryFieldMapper> {

         public Builder(String name) {
-            super(name, Defaults.FIELD_TYPE);
+            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
             builder = this;
         }

@@ -89,14 +81,6 @@ public class BinaryFieldMapper extends FieldMapper {
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
             BinaryFieldMapper.Builder builder = binaryField(name);
             parseField(builder, name, node, parserContext);
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = entry.getKey();
-                if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1) &&
-                        (parserContext.parseFieldMatcher().match(fieldName, COMPRESS) || parserContext.parseFieldMatcher().match(fieldName, COMPRESS_THRESHOLD))) {
-                    iterator.remove();
-                }
-            }
             return builder;
         }
     }
@@ -172,14 +156,14 @@ public class BinaryFieldMapper extends FieldMapper {
             return;
         }
         if (fieldType().stored()) {
-            fields.add(new Field(fieldType().names().indexName(), value, fieldType()));
+            fields.add(new Field(fieldType().name(), value, fieldType()));
         }
         if (fieldType().hasDocValues()) {
-            CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().names().indexName());
+            CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().name());
             if (field == null) {
-                field = new CustomBinaryDocValuesField(fieldType().names().indexName(), value);
-                context.doc().addWithKey(fieldType().names().indexName(), field);
+                field = new CustomBinaryDocValuesField(fieldType().name(), value);
+                context.doc().addWithKey(fieldType().name(), field);
             } else {
                 field.add(value);
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
index cd76fdbb047..76f8eb34a71 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
@@ -43,6 +43,7 @@ import java.util.Map;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
 import static org.elasticsearch.index.mapper.MapperBuilders.booleanField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;

 /**
  * A field mapper for boolean fields.
@@ -72,7 +73,7 @@ public class BooleanFieldMapper extends FieldMapper {

     public static class Builder extends FieldMapper.Builder<Builder, BooleanFieldMapper> {

         public Builder(String name) {
-            super(name, Defaults.FIELD_TYPE);
+            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
             this.builder = this;
         }

@@ -107,6 +108,8 @@
                     }
                     builder.nullValue(nodeBooleanValue(propNode));
                     iterator.remove();
+                } else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
+                    iterator.remove();
                 }
             }
             return builder;
@@ -222,9 +225,9 @@
         if (value == null) {
             return;
         }
-        fields.add(new Field(fieldType().names().indexName(), value ? "T" : "F", fieldType()));
+        fields.add(new Field(fieldType().name(), value ? "T" : "F", fieldType()));
         if (fieldType().hasDocValues()) {
-            fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value ? 1 : 0));
+            fields.add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0));
         }
     }

diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java
index 61b22a1ee26..b1553d455d7 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java
@@ -28,6 +28,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Strings;
@@ -41,6 +42,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
+
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
@@ -77,8 +79,7 @@
             setupFieldType(context);
             ByteFieldMapper fieldMapper = new ByteFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
                     context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
-            fieldMapper.includeInAll(includeInAll);
-            return fieldMapper;
+            return (ByteFieldMapper) fieldMapper.includeInAll(includeInAll);
         }

         @Override
@@ -161,7 +162,7 @@
         @Override
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
-            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
                 lowerTerm == null ? null : (int)parseValue(lowerTerm),
                 upperTerm == null ? null : (int)parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -171,7 +172,7 @@
         public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
             byte iValue = parseValue(value);
             byte iSim = fuzziness.asByte();
-            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
                 iValue - iSim,
                 iValue + iSim,
                 true, true);
@@ -238,7 +239,7 @@
                 value = ((Number) externalValue).byteValue();
             }
             if (context.includeInAll(includeInAll, this)) {
-                context.allEntries().addText(fieldType().names().fullName(), Byte.toString(value), boost);
+                context.allEntries().addText(fieldType().name(), Byte.toString(value), boost);
             }
         } else {
             XContentParser parser = context.parser();
@@ -249,9 +250,10 @@
                 }
                 value = fieldType().nullValue();
                 if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
-                    context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
+                    context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
                 }
-            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
+                    && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) {
                 XContentParser.Token token;
                 String currentFieldName = null;
                 Byte objValue = fieldType().nullValue();
@@ -278,7 +280,7 @@
                 } else {
                     value = (byte) parser.shortValue(coerce.value());
                     if (context.includeInAll(includeInAll, this)) {
-                        context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
+                        context.allEntries().addText(fieldType().name(), parser.text(), boost);
                     }
                 }
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java
index 5b4df635a34..1e45780cf18 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java
@@ -38,14 +38,25 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.NumberType;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
 import org.elasticsearch.search.suggest.completion.CompletionSuggester;
 import org.elasticsearch.search.suggest.completion.context.ContextMapping;
 import org.elasticsearch.search.suggest.completion.context.ContextMappings;

 import java.io.IOException;
-import java.util.*;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;

 import static org.elasticsearch.index.mapper.MapperBuilders.completionField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
@@ -315,15 +326,15 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
         CompletionFieldType other = (CompletionFieldType)fieldType;

         if (preservePositionIncrements != other.preservePositionIncrements) {
-            conflicts.add("mapper [" + names().fullName() + "] has different [preserve_position_increments] values");
+            conflicts.add("mapper [" + name() + "] has different [preserve_position_increments] values");
         }
         if (preserveSep != other.preserveSep) {
-            conflicts.add("mapper [" + names().fullName() + "] has different [preserve_separators] values");
+            conflicts.add("mapper [" + name() + "] has different [preserve_separators] values");
         }
         if (hasContextMappings() != other.hasContextMappings()) {
-            conflicts.add("mapper [" + names().fullName() + "] has different [context_mappings] values");
+            conflicts.add("mapper [" + name() + "] has different [context_mappings] values");
         } else if (hasContextMappings() && contextMappings.equals(other.contextMappings) == false) {
-            conflicts.add("mapper [" + names().fullName() + "] has different [context_mappings] values");
+            conflicts.add("mapper [" + name() + "] has different [context_mappings] values");
         }
     }

@@ -356,7 +367,7 @@
          * @param name of the completion field to build
          */
         public Builder(String name) {
-            super(name, new CompletionFieldType());
+            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
             builder = this;
         }

@@ -435,7 +446,7 @@
         Token token = parser.currentToken();
         Map<String, CompletionInputMetaData> inputMap = new HashMap<>(1);
         if (token == Token.VALUE_NULL) {
-            throw new MapperParsingException("completion field [" + fieldType().names().fullName() + "] does not support null values");
+            throw new MapperParsingException("completion field [" + fieldType().name() + "] does not support null values");
         } else if (token == Token.START_ARRAY) {
             while ((token = parser.nextToken()) != Token.END_ARRAY) {
                 parse(context, token, parser, inputMap);
@@ -458,10 +469,10 @@
             }
             CompletionInputMetaData metaData = completionInput.getValue();
             if (fieldType().hasContextMappings()) {
-                fieldType().getContextMappings().addField(context.doc(), fieldType().names().indexName(),
+                fieldType().getContextMappings().addField(context.doc(), fieldType().name(),
                         input, metaData.weight, metaData.contexts);
             } else {
-                context.doc().add(new SuggestField(fieldType().names().indexName(), input, metaData.weight));
+                context.doc().add(new SuggestField(fieldType().name(), input, metaData.weight));
             }
         }
         multiFields.parse(this, context);
@@ -525,7 +536,7 @@
                     weight = weightValue.intValue();
                 } else if (Fields.CONTENT_FIELD_NAME_CONTEXTS.equals(currentFieldName)) {
                     if (fieldType().hasContextMappings() == false) {
-                        throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().names().fullName() + "]");
+                        throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().name() + "]");
                     }
                     ContextMappings contextMappings = fieldType().getContextMappings();
                     XContentParser.Token currentToken = parser.currentToken();
@@ -605,11 +616,9 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
     }

     @Override
-    public void merge(Mapper mergeWith, MergeResult mergeResult) {
-        super.merge(mergeWith, mergeResult);
+    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
+        super.doMerge(mergeWith, updateAllTypes);
         CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
-        if (!mergeResult.simulate()) {
-            this.maxInputLength = fieldMergeWith.maxInputLength;
-        }
+        this.maxInputLength = fieldMergeWith.maxInputLength;
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java
index 27b96b27a44..4b752b2b2af 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java
@@ -73,7 +73,6 @@ public class DateFieldMapper extends NumberFieldMapper {
     public static class Defaults extends NumberFieldMapper.Defaults {
         public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strict_date_optional_time||epoch_millis", Locale.ROOT);
-        public static final FormatDateTimeFormatter DATE_TIME_FORMATTER_BEFORE_2_0 = Joda.forPattern("date_optional_time", Locale.ROOT);
         public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS;
         public static final DateFieldType FIELD_TYPE = new DateFieldType();
@@ -123,18 +122,11 @@
             fieldType.setNullValue(nullValue);
             DateFieldMapper fieldMapper = new DateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
                     context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
-            fieldMapper.includeInAll(includeInAll);
-            return fieldMapper;
+            return (DateFieldMapper) fieldMapper.includeInAll(includeInAll);
         }

         @Override
         protected void setupFieldType(BuilderContext context) {
-            if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0_beta1) &&
-                    !fieldType().dateTimeFormatter().format().contains("epoch_")) {
-                String format = fieldType().timeUnit().equals(TimeUnit.SECONDS) ? "epoch_second" : "epoch_millis";
-                fieldType().setDateTimeFormatter(Joda.forPattern(format + "||" + fieldType().dateTimeFormatter().format()));
-            }
-
             FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
             if (!locale.equals(dateTimeFormatter.locale())) {
                 fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale));
@@ -187,11 +179,7 @@
                 }
             }
             if (!configuredFormat) {
-                if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
-                    builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER);
-                } else {
-                    builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER_BEFORE_2_0);
-                }
+                builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER);
             }
             return builder;
         }
@@ -250,7 +238,7 @@
             @Override
             public String toString(String s) {
                 final StringBuilder sb = new StringBuilder();
-                return sb.append(names().indexName()).append(':')
+                return sb.append(name()).append(':')
                     .append(includeLower ? '[' : '{')
                     .append((lowerTerm == null) ? "*" : lowerTerm.toString())
                     .append(" TO ")
@@ -307,13 +295,13 @@
             if (strict) {
                 DateFieldType other = (DateFieldType)fieldType;
                 if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) {
-                    conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [format] across all types.");
+                    conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [format] across all types.");
                 }
                 if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) {
-                    conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types.");
+                    conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types.");
                 }
                 if (Objects.equals(timeUnit(), other.timeUnit()) == false) {
-                    conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types.");
+                    conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types.");
                 }
             }
         }
@@ -405,7 +393,7 @@
                 // not a time format
                 iSim = fuzziness.asLong();
             }
-            return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
                 iValue - iSim,
                 iValue + iSim,
                 true, true);
@@ -425,7 +413,7 @@
         }

         private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
-            return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
                 lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
                 upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
                 includeLower, includeUpper);
@@ -490,7 +478,8 @@
                 dateAsString = fieldType().nullValueAsString();
             } else if (token == XContentParser.Token.VALUE_NUMBER) {
                 dateAsString = parser.text();
-            } else if (token == XContentParser.Token.START_OBJECT) {
+            } else if (token == XContentParser.Token.START_OBJECT
+                    && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) {
                 String currentFieldName = null;
                 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                     if (token == XContentParser.Token.FIELD_NAME) {
@@ -517,7 +506,7 @@
         Long value = null;
         if (dateAsString != null) {
             if (context.includeInAll(includeInAll, this)) {
-                context.allEntries().addText(fieldType().names().fullName(), dateAsString, boost);
+                context.allEntries().addText(fieldType().name(), dateAsString, boost);
             }
             value = fieldType().parseStringValue(dateAsString);
         }
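The added Version.indexCreated(...) guard keeps the lenient object-style value parsing available only to indices created before 3.0; newer indices fall through and reject such values. A small sketch of that read-compatibility gate (stand-in enum; the real check uses org.elasticsearch.Version):

// Sketch of a version-gated parsing branch, with stand-in types.
final class VersionGate {
    enum IndexVersion { V_2_3_0, V_3_0_0 }

    static boolean allowObjectValues(IndexVersion created) {
        // mirrors: Version.indexCreated(settings).before(Version.V_3_0_0)
        return created.compareTo(IndexVersion.V_3_0_0) < 0;
    }

    public static void main(String[] args) {
        System.out.println(allowObjectValues(IndexVersion.V_2_3_0)); // true: old index keeps lenient parsing
        System.out.println(allowObjectValues(IndexVersion.V_3_0_0)); // false: new index rejects object values
    }
}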
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java
index 0e512bf4281..0497fcd394c 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java
@@ -29,6 +29,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Numbers;
@@ -80,8 +81,7 @@
             setupFieldType(context);
             DoubleFieldMapper fieldMapper = new DoubleFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
                     context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
-            fieldMapper.includeInAll(includeInAll);
-            return fieldMapper;
+            return (DoubleFieldMapper) fieldMapper.includeInAll(includeInAll);
         }

         @Override
@@ -165,7 +165,7 @@
         @Override
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
-            return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(),
                 lowerTerm == null ? null : parseDoubleValue(lowerTerm),
                 upperTerm == null ? null : parseDoubleValue(upperTerm),
                 includeLower, includeUpper);
@@ -175,7 +175,7 @@
         public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
             double iValue = parseDoubleValue(value);
             double iSim = fuzziness.asDouble();
-            return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(),
                 iValue - iSim,
                 iValue + iSim,
                 true, true);
@@ -231,7 +231,7 @@
                 value = ((Number) externalValue).doubleValue();
             }
             if (context.includeInAll(includeInAll, this)) {
-                context.allEntries().addText(fieldType().names().fullName(), Double.toString(value), boost);
+                context.allEntries().addText(fieldType().name(), Double.toString(value), boost);
             }
         } else {
             XContentParser parser = context.parser();
@@ -242,9 +242,10 @@
                 }
                 value = fieldType().nullValue();
                 if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
-                    context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
+                    context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
                 }
-            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
+                    && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) {
                 XContentParser.Token token;
                 String currentFieldName = null;
                 Double objValue = fieldType().nullValue();
@@ -271,7 +272,7 @@
                 } else {
                     value = parser.doubleValue(coerce.value());
                     if (context.includeInAll(includeInAll, this)) {
-                        context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
+                        context.allEntries().addText(fieldType().name(), parser.text(), boost);
                     }
                 }
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java
index 9a607ffd415..9aa690e0515 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java
@@ -29,6 +29,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Numbers;
@@ -81,8 +82,7 @@
             setupFieldType(context);
             FloatFieldMapper fieldMapper = new FloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
                     context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
-            fieldMapper.includeInAll(includeInAll);
-            return fieldMapper;
+            return (FloatFieldMapper) fieldMapper.includeInAll(includeInAll);
         }

         @Override
@@ -166,7 +166,7 @@
         @Override
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
-            return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -176,7 +176,7 @@
         public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
             float iValue = parseValue(value);
             final float iSim = fuzziness.asFloat();
-            return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(),
                 iValue - iSim,
                 iValue + iSim,
                 true, true);
@@ -243,7 +243,7 @@
                 value = ((Number) externalValue).floatValue();
             }
             if (context.includeInAll(includeInAll, this)) {
-                context.allEntries().addText(fieldType().names().fullName(), Float.toString(value), boost);
+                context.allEntries().addText(fieldType().name(), Float.toString(value), boost);
             }
         } else {
             XContentParser parser = context.parser();
@@ -254,9 +254,10 @@
                 }
                 value = fieldType().nullValue();
                 if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
-                    context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
+                    context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
                 }
-            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
+                    && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) {
                 XContentParser.Token token;
                 String currentFieldName = null;
                 Float objValue = fieldType().nullValue();
@@ -283,7 +284,7 @@
                 } else {
                     value = parser.floatValue(coerce.value());
                     if (context.includeInAll(includeInAll, this)) {
-                        context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
+                        context.allEntries().addText(fieldType().name(), parser.text(), boost);
                     }
                 }
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java
index 868cfeb4380..343e0b8611c 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java
@@ -29,6 +29,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Numbers;
@@ -43,6 +44,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
+
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
@@ -85,8 +87,7 @@
             IntegerFieldMapper fieldMapper = new IntegerFieldMapper(name, fieldType, defaultFieldType,
                 ignoreMalformed(context), coerce(context),
                 context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
-            fieldMapper.includeInAll(includeInAll);
-            return fieldMapper;
+            return (IntegerFieldMapper) fieldMapper.includeInAll(includeInAll);
         }

         @Override
@@ -170,7 +171,7 @@
         @Override
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
-            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -180,7 +181,7 @@
         public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
             int iValue = parseValue(value);
             int iSim = fuzziness.asInt();
-            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
                 iValue - iSim,
                 iValue + iSim,
                 true, true);
@@ -247,7 +248,7 @@
                 value = ((Number) externalValue).intValue();
             }
             if (context.includeInAll(includeInAll, this)) {
-                context.allEntries().addText(fieldType().names().fullName(), Integer.toString(value), boost);
+                context.allEntries().addText(fieldType().name(), Integer.toString(value), boost);
             }
         } else {
             XContentParser parser = context.parser();
@@ -258,9 +259,10 @@
                 }
                 value = fieldType().nullValue();
                 if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
-                    context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
+                    context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
                 }
-            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
+                    && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) {
                 XContentParser.Token token;
                 String currentFieldName = null;
                 Integer objValue = fieldType().nullValue();
@@ -287,7 +289,7 @@
                 } else {
                     value = parser.intValue(coerce.value());
                     if (context.includeInAll(includeInAll, this)) {
-                        context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
+                        context.allEntries().addText(fieldType().name(), parser.text(), boost);
                     }
                 }
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java
index 4130c902586..70261d7dc43 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java
@@ -29,6 +29,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Numbers;
@@ -43,6 +44,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
+
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
@@ -84,8 +86,7 @@ public class LongFieldMapper extends NumberFieldMapper {
             setupFieldType(context);
             LongFieldMapper fieldMapper = new LongFieldMapper(name, fieldType, defaultFieldType,
                 ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
-            fieldMapper.includeInAll(includeInAll);
-            return fieldMapper;
+            return (LongFieldMapper) fieldMapper.includeInAll(includeInAll);
         }

         @Override
@@ -168,7 +169,7 @@
         @Override
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
-            return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
                 lowerTerm == null ? null : parseLongValue(lowerTerm),
                 upperTerm == null ? null : parseLongValue(upperTerm),
                 includeLower, includeUpper);
@@ -178,7 +179,7 @@
         public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
             long iValue = parseLongValue(value);
             final long iSim = fuzziness.asLong();
-            return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
                 iValue - iSim,
                 iValue + iSim,
                 true, true);
@@ -235,7 +236,7 @@
                 value = ((Number) externalValue).longValue();
             }
             if (context.includeInAll(includeInAll, this)) {
-                context.allEntries().addText(fieldType().names().fullName(), Long.toString(value), boost);
+                context.allEntries().addText(fieldType().name(), Long.toString(value), boost);
             }
         } else {
             XContentParser parser = context.parser();
@@ -246,9 +247,10 @@
                 }
                 value = fieldType().nullValue();
                 if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
-                    context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
+                    context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
                }
-            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
+                    && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) {
                 XContentParser.Token token;
                 String currentFieldName = null;
                 Long objValue = fieldType().nullValue();
@@ -275,7 +277,7 @@
                 } else {
                     value = parser.longValue(coerce.value());
                     if (context.includeInAll(includeInAll, this)) {
-                        context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
+                        context.allEntries().addText(fieldType().name(), parser.text(), boost);
                     }
                 }
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java
index 87a63de99ec..a0a5e5e5bce 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java
@@ -36,7 +36,11 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;

 import java.io.IOException;
@@ -49,7 +53,7 @@ import java.util.List;
 public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {

     public static class Defaults {
-
+
         public static final int PRECISION_STEP_8_BIT  = Integer.MAX_VALUE; // 1tpv: 256 terms at most, not useful
         public static final int PRECISION_STEP_16_BIT = 8;                 // 2tpv
         public static final int PRECISION_STEP_32_BIT = 8;                 // 4tpv
@@ -64,9 +68,9 @@
         private Boolean ignoreMalformed;

         private Boolean coerce;
-
+
         public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) {
-            super(name, fieldType);
+            super(name, fieldType, fieldType);
             this.fieldType.setNumericPrecisionStep(defaultPrecisionStep);
         }

@@ -89,7 +93,7 @@
             }
             return Defaults.IGNORE_MALFORMED;
         }
-
+
         public T coerce(boolean coerce) {
             this.coerce = coerce;
             return builder;
@@ -140,7 +144,7 @@ List<String> conflicts, boolean strict) {
             super.checkCompatibility(other, conflicts, strict);
             if (numericPrecisionStep() != other.numericPrecisionStep()) {
-                conflicts.add("mapper [" + names().fullName() + "] has different [precision_step] values");
+                conflicts.add("mapper [" + name() + "] has different [precision_step] values");
             }
         }

@@ -173,7 +177,7 @@
     protected Explicit<Boolean> ignoreMalformed;

     protected Explicit<Boolean> coerce;
-
+
     protected NumberFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
             Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, Settings indexSettings,
             MultiFields multiFields, CopyTo copyTo) {
@@ -183,22 +187,41 @@
     }

     @Override
-    public void includeInAll(Boolean includeInAll) {
+    protected NumberFieldMapper clone() {
+        return (NumberFieldMapper) super.clone();
+    }
+
+    @Override
+    public Mapper includeInAll(Boolean includeInAll) {
         if (includeInAll != null) {
-            this.includeInAll = includeInAll;
+            NumberFieldMapper clone = clone();
+            clone.includeInAll = includeInAll;
+            return clone;
+        } else {
+            return this;
         }
     }

     @Override
-    public void includeInAllIfNotSet(Boolean includeInAll) {
+    public Mapper includeInAllIfNotSet(Boolean includeInAll) {
         if (includeInAll != null && this.includeInAll == null) {
-            this.includeInAll = includeInAll;
+            NumberFieldMapper clone = clone();
+            clone.includeInAll = includeInAll;
+            return clone;
+        } else {
+            return this;
         }
     }

     @Override
-    public void unsetIncludeInAll() {
-        includeInAll = null;
+    public Mapper unsetIncludeInAll() {
+        if (includeInAll != null) {
+            NumberFieldMapper clone = clone();
+            clone.includeInAll = null;
+            return clone;
+        } else {
+            return this;
+        }
     }

     @Override
@@ -220,7 +243,7 @@
     protected abstract void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException;

     protected final void addDocValue(ParseContext context, List<Field> fields, long value) {
-        fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value));
+        fields.add(new SortedNumericDocValuesField(fieldType().name(), value));
     }

     /**
@@ -254,21 +277,16 @@
     }

     @Override
-    public void merge(Mapper mergeWith, MergeResult mergeResult) {
-        super.merge(mergeWith, mergeResult);
-        if (!this.getClass().equals(mergeWith.getClass())) {
-            return;
-        }
+    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
+        super.doMerge(mergeWith, updateAllTypes);
         NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
-        if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
-            this.includeInAll = nfmMergeWith.includeInAll;
-            if (nfmMergeWith.ignoreMalformed.explicit()) {
-                this.ignoreMalformed = nfmMergeWith.ignoreMalformed;
-            }
-            if (nfmMergeWith.coerce.explicit()) {
-                this.coerce = nfmMergeWith.coerce;
-            }
+        this.includeInAll = nfmMergeWith.includeInAll;
+        if (nfmMergeWith.ignoreMalformed.explicit()) {
+            this.ignoreMalformed = nfmMergeWith.ignoreMalformed;
+        }
+        if (nfmMergeWith.coerce.explicit()) {
+            this.coerce = nfmMergeWith.coerce;
         }
     }

@@ -311,7 +329,7 @@
         };

         public CustomNumericField(Number value, MappedFieldType fieldType) {
-            super(fieldType.names().indexName(), fieldType);
+            super(fieldType.name(), fieldType);
             if (value != null) {
                 this.fieldsData = value;
             }
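Since includeInAll and friends can no longer mutate shared mappers, each setter clones the mapper, mutates the clone, and returns it; callers such as the builders above must keep the returned instance. A standalone sketch of that copy-on-write setter shape (illustrative class, not the FieldMapper hierarchy):

// Copy-on-write setter sketch: mutate a private clone, return it, leave `this` untouched.
class CowMapper implements Cloneable {
    private Boolean includeInAll;

    @Override
    protected CowMapper clone() {
        try {
            return (CowMapper) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);
        }
    }

    CowMapper includeInAll(Boolean includeInAll) {
        if (includeInAll == null) {
            return this; // nothing to change, reuse the original
        }
        CowMapper copy = clone();
        copy.includeInAll = includeInAll;
        return copy;
    }

    public static void main(String[] args) {
        CowMapper original = new CowMapper();
        CowMapper updated = original.includeInAll(Boolean.TRUE);
        System.out.println(original == updated); // false: callers must keep the returned instance
    }
}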
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
index 81ed6cc3bac..fdd7ab34819 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
@@ -29,6 +29,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Numbers;
@@ -43,6 +44,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
+
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
@@ -81,8 +83,7 @@
             ShortFieldMapper fieldMapper = new ShortFieldMapper(name, fieldType, defaultFieldType,
                 ignoreMalformed(context), coerce(context),
                 context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
-            fieldMapper.includeInAll(includeInAll);
-            return fieldMapper;
+            return (ShortFieldMapper) fieldMapper.includeInAll(includeInAll);
         }

         @Override
@@ -166,7 +167,7 @@
         @Override
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
-            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
                 lowerTerm == null ? null : (int)parseValue(lowerTerm),
                 upperTerm == null ? null : (int)parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -176,7 +177,7 @@
         public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
             short iValue = parseValue(value);
             short iSim = fuzziness.asShort();
-            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
+            return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
                 iValue - iSim,
                 iValue + iSim,
                 true, true);
@@ -243,7 +244,7 @@
                 value = ((Number) externalValue).shortValue();
             }
             if (context.includeInAll(includeInAll, this)) {
-                context.allEntries().addText(fieldType().names().fullName(), Short.toString(value), boost);
+                context.allEntries().addText(fieldType().name(), Short.toString(value), boost);
             }
         } else {
             XContentParser parser = context.parser();
@@ -254,9 +255,10 @@
                 }
                 value = fieldType().nullValue();
                 if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
-                    context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
+                    context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
                 }
-            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
+                    && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) {
                 XContentParser.Token token;
                 String currentFieldName = null;
                 Short objValue = fieldType().nullValue();
@@ -283,7 +285,7 @@
                 } else {
                     value = parser.shortValue(coerce.value());
                     if (context.includeInAll(includeInAll, this)) {
-                        context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
+                        context.allEntries().addText(fieldType().name(), parser.text(), boost);
                     }
                 }
             }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
index 0a921ad85eb..46b4097c2c0 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
@@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;

@@ -46,8 +45,8 @@ import java.util.Map;
 import static org.apache.lucene.index.IndexOptions.NONE;
 import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;

 public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {

@@ -70,19 +69,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         * values.
*/ public static final int POSITION_INCREMENT_GAP = 100; - public static final int POSITION_INCREMENT_GAP_PRE_2_0 = 0; public static final int IGNORE_ABOVE = -1; - - /** - * The default position_increment_gap for a particular version of Elasticsearch. - */ - public static int positionIncrementGap(Version version) { - if (version.before(Version.V_2_0_0_beta1)) { - return POSITION_INCREMENT_GAP_PRE_2_0; - } - return POSITION_INCREMENT_GAP; - } } public static class Builder extends FieldMapper.Builder { @@ -99,7 +87,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc protected int ignoreAbove = Defaults.IGNORE_ABOVE; public Builder(String name) { - super(name, Defaults.FIELD_TYPE); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); builder = this; } @@ -150,8 +138,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc StringFieldMapper fieldMapper = new StringFieldMapper( name, fieldType, defaultFieldType, positionIncrementGap, ignoreAbove, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return fieldMapper.includeInAll(includeInAll); } } @@ -177,8 +164,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } builder.searchQuotedAnalyzer(analyzer); iterator.remove(); - } else if (propName.equals("position_increment_gap") || - parserContext.indexVersionCreated().before(Version.V_2_0_0) && propName.equals("position_offset_gap")) { + } else if (propName.equals("position_increment_gap")) { int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1); if (newPositionIncrementGap < 0) { throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed."); @@ -250,29 +236,48 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { - throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values"); + throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values"); } this.positionIncrementGap = positionIncrementGap; this.ignoreAbove = ignoreAbove; } @Override - public void includeInAll(Boolean includeInAll) { + protected StringFieldMapper clone() { + return (StringFieldMapper) super.clone(); + } + + @Override + public StringFieldMapper includeInAll(Boolean includeInAll) { if (includeInAll != null) { - this.includeInAll = includeInAll; + StringFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; } } @Override - public void includeInAllIfNotSet(Boolean includeInAll) { + public StringFieldMapper includeInAllIfNotSet(Boolean includeInAll) { if (includeInAll != null && this.includeInAll == null) { - this.includeInAll = includeInAll; + StringFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; } } @Override - public void unsetIncludeInAll() { - includeInAll = null; + public StringFieldMapper unsetIncludeInAll() { + if (includeInAll != null) { + StringFieldMapper clone = clone(); + clone.includeInAll = null; + return clone; + } else { + return this; + } } @Override @@ -298,19 +303,16 @@ public class 
StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc return; } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), valueAndBoost.value(), valueAndBoost.boost()); + context.allEntries().addText(fieldType().name(), valueAndBoost.value(), valueAndBoost.boost()); } if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().names().indexName(), valueAndBoost.value(), fieldType()); + Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType()); field.setBoost(valueAndBoost.boost()); fields.add(field); } if (fieldType().hasDocValues()) { - fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(valueAndBoost.value()))); - } - if (fields.isEmpty()) { - context.ignoredValue(fieldType().names().indexName(), valueAndBoost.value()); + fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value()))); } } @@ -324,13 +326,14 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc */ public static ValueAndBoost parseCreateFieldForString(ParseContext context, String nullValue, float defaultBoost) throws IOException { if (context.externalValueSet()) { - return new ValueAndBoost((String) context.externalValue(), defaultBoost); + return new ValueAndBoost(context.externalValue().toString(), defaultBoost); } XContentParser parser = context.parser(); if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { return new ValueAndBoost(nullValue, defaultBoost); } - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + if (parser.currentToken() == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) { XContentParser.Token token; String currentFieldName = null; String value = nullValue; @@ -359,15 +362,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } - if (!mergeResult.simulate()) { - this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll; - this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll; + this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java index 8348892e44a..85df5ea3d3b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost; @@ -81,8 +80,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { TokenCountFieldMapper fieldMapper = new 
TokenCountFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), analyzer, multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (TokenCountFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override @@ -149,9 +147,6 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { } addIntegerFields(context, fields, count, valueAndBoost.boost()); } - if (fields.isEmpty()) { - context.ignoredValue(fieldType().names().indexName(), valueAndBoost.value()); - } } /** @@ -190,14 +185,9 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } - if (!mergeResult.simulate()) { - this.analyzer = ((TokenCountFieldMapper) mergeWith).analyzer; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + this.analyzer = ((TokenCountFieldMapper) mergeWith).analyzer; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index e530243657c..d7f3570a53c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -29,15 +29,15 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.similarity.SimilarityProvider; +import org.elasticsearch.index.similarity.SimilarityService; -import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -56,99 +56,6 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeSt */ public class TypeParsers { - public static final String MULTI_FIELD_CONTENT_TYPE = "multi_field"; - public static final Mapper.TypeParser multiFieldConverterTypeParser = new Mapper.TypeParser() { - - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - ContentPath.Type pathType = null; - FieldMapper.Builder mainFieldBuilder = null; - List fields = null; - String firstType = null; - - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String fieldName = Strings.toUnderscoreCase(entry.getKey()); - Object fieldNode = entry.getValue(); - if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - pathType = parsePathType(name, fieldNode.toString()); - iterator.remove(); - } else if (fieldName.equals("fields")) { - Map fieldsNode = (Map) fieldNode; - for (Iterator> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) { - Map.Entry entry1 = 
fieldsIterator.next(); - String propName = entry1.getKey(); - Map propNode = (Map) entry1.getValue(); - - String type; - Object typeNode = propNode.get("type"); - if (typeNode != null) { - type = typeNode.toString(); - if (firstType == null) { - firstType = type; - } - } else { - throw new MapperParsingException("no type specified for property [" + propName + "]"); - } - - Mapper.TypeParser typeParser = parserContext.typeParser(type); - if (typeParser == null) { - throw new MapperParsingException("no handler for type [" + type + "] declared on field [" + fieldName + "]"); - } - if (propName.equals(name)) { - mainFieldBuilder = (FieldMapper.Builder) typeParser.parse(propName, propNode, parserContext); - fieldsIterator.remove(); - } else { - if (fields == null) { - fields = new ArrayList<>(2); - } - fields.add((FieldMapper.Builder) typeParser.parse(propName, propNode, parserContext)); - fieldsIterator.remove(); - } - } - fieldsNode.remove("type"); - DocumentMapperParser.checkNoRemainingFields(fieldName, fieldsNode, parserContext.indexVersionCreated()); - iterator.remove(); - } - } - - if (mainFieldBuilder == null) { - if (fields == null) { - // No fields at all were specified in multi_field, so lets return a non indexed string field. - return new StringFieldMapper.Builder(name).index(false); - } - Mapper.TypeParser typeParser = parserContext.typeParser(firstType); - if (typeParser == null) { - // The first multi field's type is unknown - mainFieldBuilder = new StringFieldMapper.Builder(name).index(false); - } else { - Mapper.Builder substitute = typeParser.parse(name, Collections.emptyMap(), parserContext); - if (substitute instanceof FieldMapper.Builder) { - mainFieldBuilder = ((FieldMapper.Builder) substitute).index(false); - } else { - // The first multi isn't a core field type - mainFieldBuilder = new StringFieldMapper.Builder(name).index(false); - } - } - } - - if (fields != null && pathType != null) { - for (Mapper.Builder field : fields) { - mainFieldBuilder.addMultiField(field); - } - mainFieldBuilder.multiFieldPathType(pathType); - } else if (fields != null) { - for (Mapper.Builder field : fields) { - mainFieldBuilder.addMultiField(field); - } - } else if (pathType != null) { - mainFieldBuilder.multiFieldPathType(pathType); - } - return mainFieldBuilder; - } - - }; - public static final String DOC_VALUES = "doc_values"; public static final String INDEX_OPTIONS_DOCS = "docs"; public static final String INDEX_OPTIONS_FREQS = "freqs"; @@ -174,7 +81,8 @@ public class TypeParsers { builder.omitNorms(nodeBooleanValue(propNode)); iterator.remove(); } else if (propName.equals("similarity")) { - builder.similarity(parserContext.getSimilarity(propNode.toString())); + SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); + builder.similarity(similarityProvider); iterator.remove(); } else if (parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); @@ -205,9 +113,7 @@ public class TypeParsers { } else if (propName.equals("store_term_vector_payloads")) { builder.storeTermVectorPayloads(nodeBooleanValue(propNode)); iterator.remove(); - } else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0 - propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - + } else if (propName.equals("analyzer")) { NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); if (analyzer == null) { throw new 
MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); @@ -253,10 +159,7 @@ public class TypeParsers { Map.Entry entry = iterator.next(); final String propName = Strings.toUnderscoreCase(entry.getKey()); final Object propNode = entry.getValue(); - if (propName.equals("index_name") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { - builder.indexName(propNode.toString()); - iterator.remove(); - } else if (propName.equals("store")) { + if (propName.equals("store")) { builder.store(parseStore(name, propNode.toString())); iterator.remove(); } else if (propName.equals("index")) { @@ -287,28 +190,15 @@ public class TypeParsers { } DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated()); iterator.remove(); - } else if (propName.equals("omit_term_freq_and_positions")) { - final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; - if (indexVersionCreated.onOrAfter(Version.V_1_0_0_RC2)) { - throw new ElasticsearchParseException("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs'] instead"); - } - // deprecated option for BW compat - builder.indexOptions(op); - iterator.remove(); } else if (propName.equals("index_options")) { builder.indexOptions(nodeIndexOptionValue(propNode)); iterator.remove(); } else if (propName.equals("include_in_all")) { builder.includeInAll(nodeBooleanValue(propNode)); iterator.remove(); - } else if (propName.equals("postings_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { - // ignore for old indexes - iterator.remove(); - } else if (propName.equals("doc_values_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { - // ignore for old indexes - iterator.remove(); } else if (propName.equals("similarity")) { - builder.similarity(parserContext.getSimilarity(propNode.toString())); + SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); + builder.similarity(similarityProvider); iterator.remove(); } else if (propName.equals("fielddata")) { final Settings settings = Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata"))).build(); @@ -337,10 +227,7 @@ public class TypeParsers { public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) { parserContext = parserContext.createMultiFieldContext(parserContext); - if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.multiFieldPathType(parsePathType(name, propNode.toString())); - return true; - } else if (propName.equals("fields")) { + if (propName.equals("fields")) { final Map multiFieldsPropNodes; @@ -457,17 +344,6 @@ public class TypeParsers { } } - public static ContentPath.Type parsePathType(String name, String path) throws MapperParsingException { - path = Strings.toUnderscoreCase(path); - if ("just_name".equals(path)) { - return ContentPath.Type.JUST_NAME; - } else if ("full".equals(path)) { - return ContentPath.Type.FULL; - } else { - throw new MapperParsingException("wrong value for pathType [" + path + "] for object [" + name + "]"); - } - } - @SuppressWarnings("unchecked") public static void parseCopyFields(Object propNode, FieldMapper.Builder builder) { FieldMapper.CopyTo.Builder copyToBuilder = new FieldMapper.CopyTo.Builder(); @@ -481,4 +357,15 @@ public class 
TypeParsers { builder.copyTo(copyToBuilder.build()); } + private static SimilarityProvider resolveSimilarity(Mapper.TypeParser.ParserContext parserContext, String name, String value) { + if (parserContext.indexVersionCreated().before(Version.V_3_0_0) && "default".equals(value)) { + // "default" similarity has been renamed into "classic" in 3.x. + value = SimilarityService.DEFAULT_SIMILARITY; + } + SimilarityProvider similarityProvider = parserContext.getSimilarity(value); + if (similarityProvider == null) { + throw new MapperParsingException("Unknown Similarity type [" + value + "] for [" + name + "]"); + } + return similarityProvider; + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 0b57d866ddd..52202fac716 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -33,12 +33,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; @@ -49,7 +47,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; import static org.elasticsearch.index.mapper.MapperBuilders.doubleField; @@ -74,7 +71,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } public static class Defaults { - public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL; public static final boolean ENABLE_LATLON = false; public static final boolean ENABLE_GEOHASH = false; public static final boolean ENABLE_GEOHASH_PREFIX = false; @@ -83,7 +79,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } public abstract static class Builder extends FieldMapper.Builder { - protected ContentPath.Type pathType = Defaults.PATH_TYPE; protected boolean enableLatLon = Defaults.ENABLE_LATLON; @@ -98,7 +93,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr protected Boolean ignoreMalformed; public Builder(String name, GeoPointFieldType fieldType) { - super(name, fieldType); + super(name, fieldType, fieldType); } @Override @@ -106,12 +101,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr return (GeoPointFieldType)fieldType; } - @Override - public T multiFieldPathType(ContentPath.Type pathType) { - this.pathType = pathType; - return builder; - } - @Override public T fieldDataSettings(Settings settings) { this.fieldDataSettings = settings; @@ -159,13 +148,10 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } public abstract Y build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - 
Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo); public Y build(Mapper.BuilderContext context) { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); - GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType; DoubleFieldMapper latMapper = null; @@ -191,9 +177,8 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr geoPointFieldType.setGeoHashEnabled(geoHashMapper.fieldType(), geoHashPrecision, enableGeoHashPrefix); } context.path().remove(); - context.path().pathType(origPathType); - return build(context, name, fieldType, defaultFieldType, context.indexSettings(), origPathType, + return build(context, name, fieldType, defaultFieldType, context.indexSettings(), latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); } } @@ -302,20 +287,20 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr super.checkCompatibility(fieldType, conflicts, strict); GeoPointFieldType other = (GeoPointFieldType)fieldType; if (isLatLonEnabled() != other.isLatLonEnabled()) { - conflicts.add("mapper [" + names().fullName() + "] has different [lat_lon]"); + conflicts.add("mapper [" + name() + "] has different [lat_lon]"); } if (isLatLonEnabled() && other.isLatLonEnabled() && latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) { - conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]"); + conflicts.add("mapper [" + name() + "] has different [precision_step]"); } if (isGeoHashEnabled() != other.isGeoHashEnabled()) { - conflicts.add("mapper [" + names().fullName() + "] has different [geohash]"); + conflicts.add("mapper [" + name() + "] has different [geohash]"); } if (geoHashPrecision() != other.geoHashPrecision()) { - conflicts.add("mapper [" + names().fullName() + "] has different [geohash_precision]"); + conflicts.add("mapper [" + name() + "] has different [geohash_precision]"); } if (isGeoHashPrefixEnabled() != other.isGeoHashPrefixEnabled()) { - conflicts.add("mapper [" + names().fullName() + "] has different [geohash_prefix]"); + conflicts.add("mapper [" + name() + "] has different [geohash_prefix]"); } } @@ -361,21 +346,18 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } } - protected final DoubleFieldMapper latMapper; + protected DoubleFieldMapper latMapper; - protected final DoubleFieldMapper lonMapper; + protected DoubleFieldMapper lonMapper; - protected final ContentPath.Type pathType; - - protected final StringFieldMapper geoHashMapper; + protected StringFieldMapper geoHashMapper; protected Explicit ignoreMalformed; protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - this.pathType = pathType; this.latMapper = latMapper; this.lonMapper = lonMapper; 
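// NOTE: the doMerge hunk just below repeats a pattern applied to every mapper in
// this diff: merge(Mapper, MergeResult) becomes doMerge(Mapper, boolean updateAllTypes),
// the simulate pass disappears, and conflicts fail fast with IllegalArgumentException
// instead of being collected on a MergeResult. A minimal compilable sketch of the new
// contract; ToyMapper and the [precision] setting are made-up stand-ins, not code from
// this patch:
class ToyMapper {
    private boolean precisionExplicit;
    private int precision = 4;

    protected void doMerge(ToyMapper mergeWith, boolean updateAllTypes) {
        // conflicts now throw immediately; there is no simulate() phase to collect them
        if (precisionExplicit && mergeWith.precisionExplicit
                && precision != mergeWith.precision) {
            throw new IllegalArgumentException("mapper [toy] has different [precision]");
        }
        if (mergeWith.precisionExplicit) {
            precision = mergeWith.precision;
            precisionExplicit = true;
        }
    }
}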
this.geoHashMapper = geoHashMapper; @@ -388,17 +370,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } - + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); BaseGeoPointFieldMapper gpfmMergeWith = (BaseGeoPointFieldMapper) mergeWith; - if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { - if (gpfmMergeWith.ignoreMalformed.explicit()) { - this.ignoreMalformed = gpfmMergeWith.ignoreMalformed; - } + if (gpfmMergeWith.ignoreMalformed.explicit()) { + this.ignoreMalformed = gpfmMergeWith.ignoreMalformed; } } @@ -436,13 +412,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr latMapper.parse(context.createExternalValueContext(point.lat())); lonMapper.parse(context.createExternalValueContext(point.lon())); } - multiFields.parse(this, context); + multiFields.parse(this, context.createExternalValueContext(point)); } @Override public Mapper parse(ParseContext context) throws IOException { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); context.path().add(simpleName()); GeoPoint sparse = context.parseExternalValue(GeoPoint.class); @@ -487,7 +461,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } context.path().remove(); - context.path().pathType(origPathType); return null; } @@ -512,9 +485,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || pathType != Defaults.PATH_TYPE) { - builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); - } if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) { builder.field("lat_lon", fieldType().isLatLonEnabled()); } @@ -534,4 +504,25 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); } } -} \ No newline at end of file + + @Override + public FieldMapper updateFieldType(Map fullNameToFieldType) { + BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType); + StringFieldMapper geoUpdated = geoHashMapper == null ? null : (StringFieldMapper) geoHashMapper.updateFieldType(fullNameToFieldType); + DoubleFieldMapper latUpdated = latMapper == null ? null : (DoubleFieldMapper) latMapper.updateFieldType(fullNameToFieldType); + DoubleFieldMapper lonUpdated = lonMapper == null ? 
null : (DoubleFieldMapper) lonMapper.updateFieldType(fullNameToFieldType); + if (updated == this + && geoUpdated == geoHashMapper + && latUpdated == latMapper + && lonUpdated == lonMapper) { + return this; + } + if (updated == this) { + updated = (BaseGeoPointFieldMapper) updated.clone(); + } + updated.geoHashMapper = geoUpdated; + updated.latMapper = latUpdated; + updated.lonMapper = lonUpdated; + return updated; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 286aca29727..71309d2fa2d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -64,7 +63,6 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { FIELD_TYPE.setNumericPrecisionStep(GeoPointField.PRECISION_STEP); FIELD_TYPE.setDocValuesType(DocValuesType.SORTED_NUMERIC); FIELD_TYPE.setHasDocValues(true); - FIELD_TYPE.setStored(true); FIELD_TYPE.freeze(); } } @@ -81,12 +79,12 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { @Override public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, + MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { fieldType.setTokenized(false); setupFieldType(context); - return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, + return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); } @@ -104,9 +102,9 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { } public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields, + super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); } @@ -124,8 +122,8 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { GeoUtils.normalizePoint(point); } if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - context.doc().add(new GeoPointField(fieldType().names().indexName(), point.lon(), point.lat(), fieldType() )); + context.doc().add(new GeoPointField(fieldType().name(), point.lon(), point.lat(), fieldType() )); } 
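// NOTE: the updateFieldType override added to BaseGeoPointFieldMapper above uses a
// copy-on-write idiom: return `this` untouched when no child mapper changed, otherwise
// clone once and swap in the updated children. A compilable toy version of the same
// idiom; Node and the "child" lookup key are illustrative, not Elasticsearch classes:
class Node implements Cloneable {
    Node child; // may be null, like the optional lat/lon/geohash sub-mappers

    Node update(java.util.Map<String, Node> replacements) {
        Node updatedChild = child == null ? null : replacements.getOrDefault("child", child);
        if (updatedChild == child) {
            return this;         // nothing changed: keep sharing this instance
        }
        Node copy = clone();     // something changed: mutate a private copy, never `this`
        copy.child = updatedChild;
        return copy;
    }

    @Override
    protected Node clone() {
        try {
            return (Node) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);
        }
    }
}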
super.parse(context, point, geoHash); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index 84e6bde07ac..8c954c06a5e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -35,11 +35,9 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField; @@ -111,14 +109,14 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement @Override public GeoPointFieldMapperLegacy build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, + MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { fieldType.setTokenized(false); setupFieldType(context); fieldType.setHasDocValues(false); defaultFieldType.setHasDocValues(false); - return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, + return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo); } @@ -129,30 +127,11 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement } public static Builder parse(Builder builder, Map node, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException { - final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String propName = Strings.toUnderscoreCase(entry.getKey()); Object propNode = entry.getValue(); - if (indexCreatedBeforeV2_0 && propName.equals("validate")) { - builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode); - iterator.remove(); - } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) { - builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode); - iterator.remove(); - } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) { - builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode); - iterator.remove(); - } else if (propName.equals(Names.COERCE)) { - builder.coerce = XContentMapValues.nodeBooleanValue(propNode); - iterator.remove(); - } else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) { - builder.coerce = XContentMapValues.nodeBooleanValue(propNode); - iterator.remove(); - } else if 
(indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) { - builder.coerce = XContentMapValues.nodeBooleanValue(propNode); - iterator.remove(); - } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) { + if (propName.equals(Names.COERCE)) { builder.coerce = XContentMapValues.nodeBooleanValue(propNode); iterator.remove(); } @@ -288,32 +267,27 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement protected Explicit coerce; public GeoPointFieldMapperLegacy(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, Explicit coerce, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields, + super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); this.coerce = coerce; } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); GeoPointFieldMapperLegacy gpfmMergeWith = (GeoPointFieldMapperLegacy) mergeWith; if (gpfmMergeWith.coerce.explicit()) { if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) { - mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]"); + throw new IllegalArgumentException("mapper [" + fieldType().name() + "] has different [coerce]"); } } - if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { - if (gpfmMergeWith.coerce.explicit()) { - this.coerce = gpfmMergeWith.coerce; - } + if (gpfmMergeWith.coerce.explicit()) { + this.coerce = gpfmMergeWith.coerce; } } @@ -337,17 +311,17 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement } if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType()); + Field field = new Field(fieldType().name(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType()); context.doc().add(field); } super.parse(context, point, geoHash); if (fieldType().hasDocValues()) { - CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName()); + CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().name()); if (field == null) { - field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon()); - context.doc().addWithKey(fieldType().names().indexName(), field); + field = new CustomGeoPointDocValuesField(fieldType().name(), point.lat(), point.lon()); + context.doc().addWithKey(fieldType().name(), field); } else { field.add(point.lat(), point.lon()); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 71b6d89610f..2ea59393ca0 100644 --- 
a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -30,7 +30,6 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; -import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoUtils; @@ -45,7 +44,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; @@ -97,8 +95,8 @@ public class GeoShapeFieldMapper extends FieldMapper { public static final boolean POINTS_ONLY = false; public static final int GEOHASH_LEVELS = GeoUtils.geoHashLevelsForPrecision("50m"); public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision("50m"); - public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d; public static final Orientation ORIENTATION = Orientation.RIGHT; + public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d; public static final Explicit COERCE = new Explicit<>(false, false); public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType(); @@ -106,7 +104,7 @@ public class GeoShapeFieldMapper extends FieldMapper { static { // setting name here is a hack so freeze can be called...instead all these options should be // moved to the default ctor for GeoShapeFieldType, and defaultFieldType() should be removed from mappers... 
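// NOTE: the setName change just below is the visible end of MappedFieldType.Names.
// A field type used to carry an index name and a full dotted path that could differ
// (the old "index_name" mapping option, whose parsing is also deleted in the
// TypeParsers hunks above); it now carries a single name, which is why every
// names().indexName() and names().fullName() call site in this diff collapses to
// name(). A toy before/after; both class names are illustrative:
class LegacyNames {                // before: two potentially different names
    final String indexName;        // what the field was indexed under
    final String fullName;         // the dotted path, e.g. "user.address.city"
    LegacyNames(String indexName, String fullName) {
        this.indexName = indexName;
        this.fullName = fullName;
    }
}
class ToyFieldType {               // after: one name serves both purposes
    private String name;
    public void setName(String name) { this.name = name; }
    public String name() { return name; }
}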
- FIELD_TYPE.setNames(new MappedFieldType.Names("DoesNotExist")); + FIELD_TYPE.setName("DoesNotExist"); FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(false); @@ -121,7 +119,7 @@ public class GeoShapeFieldMapper extends FieldMapper { private Boolean coerce; public Builder(String name) { - super(name, Defaults.FIELD_TYPE); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); } @Override @@ -148,12 +146,7 @@ public class GeoShapeFieldMapper extends FieldMapper { public GeoShapeFieldMapper build(BuilderContext context) { GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType; - if (geoShapeFieldType.tree.equals(Names.TREE_QUADTREE) && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) { - geoShapeFieldType.setTree("legacyquadtree"); - } - - if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1) || - (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0)) { + if (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0) { geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT); } setupFieldType(context); @@ -185,7 +178,7 @@ public class GeoShapeFieldMapper extends FieldMapper { builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString())); iterator.remove(); } else if (Names.ORIENTATION.equals(fieldName)) { - builder.fieldType().setOrientation(ShapeBuilder.orientationFromString(fieldNode.toString())); + builder.fieldType().setOrientation(ShapeBuilder.Orientation.fromString(fieldNode.toString())); iterator.remove(); } else if (Names.STRATEGY.equals(fieldName)) { builder.fieldType().setStrategyName(fieldNode.toString()); @@ -279,10 +272,10 @@ public class GeoShapeFieldMapper extends FieldMapper { throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]"); } - recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, names().indexName()); + recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, name()); recursiveStrategy.setDistErrPct(distanceErrorPct()); recursiveStrategy.setPruneLeafyBranches(false); - termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, names().indexName()); + termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, name()); termStrategy.setDistErrPct(distanceErrorPct()); defaultStrategy = resolveStrategy(strategyName); defaultStrategy.setPointsOnly(pointsOnly); @@ -294,33 +287,33 @@ public class GeoShapeFieldMapper extends FieldMapper { GeoShapeFieldType other = (GeoShapeFieldType)fieldType; // prevent user from changing strategies if (strategyName().equals(other.strategyName()) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [strategy]"); + conflicts.add("mapper [" + name() + "] has different [strategy]"); } // prevent user from changing trees (changes encoding) if (tree().equals(other.tree()) == false) { - conflicts.add("mapper [" + names().fullName() + "] has different [tree]"); + conflicts.add("mapper [" + name() + "] has different [tree]"); } if ((pointsOnly() != other.pointsOnly())) { - conflicts.add("mapper [" + names().fullName() + "] has different points_only"); + conflicts.add("mapper [" + name() + "] has different points_only"); } // TODO we should allow this, but at the moment levels is used to build bookkeeping variables // in lucene's SpatialPrefixTree implementations, need a patch to correct that first if (treeLevels() != other.treeLevels()) { - conflicts.add("mapper [" + names().fullName() + "] has 
different [tree_levels]"); + conflicts.add("mapper [" + name() + "] has different [tree_levels]"); } if (precisionInMeters() != other.precisionInMeters()) { - conflicts.add("mapper [" + names().fullName() + "] has different [precision]"); + conflicts.add("mapper [" + name() + "] has different [precision]"); } if (strict) { if (orientation() != other.orientation()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [orientation] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [orientation] across all types."); } if (distanceErrorPct() != other.distanceErrorPct()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [distance_error_pct] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [distance_error_pct] across all types."); } } } @@ -451,7 +444,7 @@ public class GeoShapeFieldMapper extends FieldMapper { shape = shapeBuilder.build(); } if (fieldType().pointsOnly() && !(shape instanceof Point)) { - throw new MapperParsingException("[{" + fieldType().names().fullName() + "}] is configured for points only but a " + + throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " + ((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) + " was found"); } Field[] fields = fieldType().defaultStrategy().createIndexableFields(shape); @@ -465,7 +458,7 @@ public class GeoShapeFieldMapper extends FieldMapper { context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e); + throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); } return null; } @@ -475,17 +468,12 @@ public class GeoShapeFieldMapper extends FieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); - if (!this.getClass().equals(mergeWith.getClass())) { - return; - } + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)mergeWith; - if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { - if (gsfm.coerce.explicit()) { - this.coerce = gsfm.coerce; - } + if (gsfm.coerce.explicit()) { + this.coerce = gsfm.coerce; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index 645c36a4855..d9a345caf28 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -36,7 +36,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryShardContext; @@ -58,11 +57,24 @@ public class AllFieldMapper extends MetadataFieldMapper { public interface IncludeInAll { - 
void includeInAll(Boolean includeInAll); + /** + * If {@code includeInAll} is not null then return a copy of this mapper + * that will include values in the _all field according to {@code includeInAll}. + */ + Mapper includeInAll(Boolean includeInAll); - void includeInAllIfNotSet(Boolean includeInAll); + /** + * If {@code includeInAll} is not null and not set on this mapper yet, then + * return a copy of this mapper that will include values in the _all field + * according to {@code includeInAll}. + */ + Mapper includeInAllIfNotSet(Boolean includeInAll); - void unsetIncludeInAll(); + /** + * If {@code includeInAll} was already set on this mapper then return a copy + * of this mapper that has {@code includeInAll} not set. + */ + Mapper unsetIncludeInAll(); } public static final String NAME = "_all"; @@ -79,7 +91,7 @@ public class AllFieldMapper extends MetadataFieldMapper { static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); FIELD_TYPE.setTokenized(true); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze(); } } @@ -89,7 +101,7 @@ public class AllFieldMapper extends MetadataFieldMapper { private EnabledAttributeMapper enabled = Defaults.ENABLED; public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); builder = this; indexName = Defaults.INDEX_NAME; } @@ -142,9 +154,6 @@ public class AllFieldMapper extends MetadataFieldMapper { if (fieldName.equals("enabled")) { builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); iterator.remove(); - } else if (fieldName.equals("auto_boost") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - // Old 1.x setting which is now ignored - iterator.remove(); } } return builder; @@ -234,7 +243,7 @@ public class AllFieldMapper extends MetadataFieldMapper { // reset the entries context.allEntries().reset(); Analyzer analyzer = findAnalyzer(context); - fields.add(new AllField(fieldType().names().indexName(), context.allEntries(), analyzer, fieldType())); + fields.add(new AllField(fieldType().name(), context.allEntries(), analyzer, fieldType())); } private Analyzer findAnalyzer(ParseContext context) { @@ -309,11 +318,11 @@ public class AllFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) { - mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); + throw new IllegalArgumentException("mapper [" + fieldType().name() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); } - super.merge(mergeWith, mergeResult); + super.doMerge(mergeWith, updateAllTypes); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java index 7883415e59a..17d1c2b9f08 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -42,7 +41,6 @@ import java.util.Map; import java.util.Objects; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; /** * A mapper that indexes the field names of a document under _field_names. This mapper is typically useful in order @@ -69,7 +67,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze(); } } @@ -78,7 +76,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { private boolean enabled = Defaults.ENABLED; public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @@ -107,14 +105,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_1_3_0)) { - throw new IllegalArgumentException("type="+CONTENT_TYPE+" is not supported on indices created before version 1.3.0. Is your cluster running multiple datanode versions?"); - } - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - parseField(builder, builder.name, node, parserContext); - } for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -175,7 +166,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { if (strict) { FieldNamesFieldType other = (FieldNamesFieldType)fieldType; if (isEnabled() != other.isEnabled()) { - conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [enabled] across all types."); + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [enabled] across all types."); } } } @@ -203,21 +194,12 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { } } - private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled - private FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) { this(existing == null ? 
Defaults.FIELD_TYPE.clone() : existing.clone(), indexSettings); } private FieldNamesFieldMapper(MappedFieldType fieldType, Settings indexSettings) { super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings); - this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0); - if (this.pre13Index) { - FieldNamesFieldType newFieldType = fieldType().clone(); - newFieldType.setEnabled(false); - newFieldType.freeze(); - fieldTypeRef.set(newFieldType); - } } @Override @@ -290,7 +272,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { for (String path : paths) { for (String fieldName : extractFieldNames(path)) { if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - document.add(new Field(fieldType().names().indexName(), fieldName, fieldType())); + document.add(new Field(fieldType().name(), fieldName, fieldType())); } } } @@ -304,9 +286,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (pre13Index) { - return builder; - } boolean includeDefaults = params.paramAsBoolean("include_defaults", false); if (includeDefaults == false && fieldType().isEnabled() == Defaults.ENABLED) { @@ -317,9 +296,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { if (includeDefaults || fieldType().isEnabled() != Defaults.ENABLED) { builder.field("enabled", fieldType().isEnabled()); } - if (indexCreatedBefore2x && (includeDefaults || fieldType().equals(Defaults.FIELD_TYPE) == false)) { - super.doXContentBody(builder, includeDefaults, params); - } builder.endObject(); return builder; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index 16b6c4c56da..a586a7b5b94 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -31,9 +31,7 @@ import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -44,7 +42,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; @@ -52,12 +49,9 @@ import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Collection; -import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; - /** * */ @@ -78,26 +72,18 @@ public class IdFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); 
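// NOTE: the _field_names hunks just above show the diff's other recurring theme:
// back-compat branches keyed off the version an index was created with (pre-1.3
// _field_names handling here, pre-2.0 options elsewhere) are deleted outright, while
// a handful of new gates are added for 3.0 -- the START_OBJECT parsing checks against
// Version.V_3_0_0 and the resolveSimilarity helper earlier in this diff. A compilable
// toy of a resolveSimilarity-style gate; the constant and method are stand-ins, not
// the real Version API:
class ToySimilarityCompat {
    static final int V_3_0_0 = 3_00_00_99; // stand-in, not the real Version id

    static String resolve(int indexCreatedVersion, String value) {
        if (indexCreatedVersion < V_3_0_0 && "default".equals(value)) {
            // "default" was renamed to "classic" in 3.x; pre-3.0 indexes keep working
            return "classic";
        }
        return value; // unknown names are rejected later with MapperParsingException
    }
}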
FIELD_TYPE.freeze(); } - - public static final String PATH = null; } public static class Builder extends MetadataFieldMapper.Builder { - private String path = Defaults.PATH; - public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } - public Builder path(String path) { - this.path = path; - return builder; - } // if we are indexed we use DOCS @Override protected IndexOptions getDefaultIndexOption() { @@ -107,28 +93,14 @@ public class IdFieldMapper extends MetadataFieldMapper { @Override public IdFieldMapper build(BuilderContext context) { setupFieldType(context); - return new IdFieldMapper(fieldType, path, context.indexSettings()); + return new IdFieldMapper(fieldType, context.indexSettings()); } } public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - throw new MapperParsingException(NAME + " is not configurable"); - } - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - parseField(builder, builder.name, node, parserContext); - for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry<String, Object> entry = iterator.next(); - String fieldName = Strings.toUnderscoreCase(entry.getKey()); - Object fieldNode = entry.getValue(); - if (fieldName.equals("path")) { - builder.path(fieldNode.toString()); - iterator.remove(); - } - } - return builder; + throw new MapperParsingException(NAME + " is not configurable"); } @Override @@ -230,31 +202,12 @@ public class IdFieldMapper extends MetadataFieldMapper { } } - private final String path; - private IdFieldMapper(Settings indexSettings, MappedFieldType existing) { - this(idFieldType(indexSettings, existing), Defaults.PATH, indexSettings); + this(existing != null ?
existing : Defaults.FIELD_TYPE, indexSettings); } - private IdFieldMapper(MappedFieldType fieldType, String path, Settings indexSettings) { + private IdFieldMapper(MappedFieldType fieldType, Settings indexSettings) { super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings); - this.path = path; - } - - private static MappedFieldType idFieldType(Settings indexSettings, MappedFieldType existing) { - if (existing != null) { - return existing.clone(); - } - MappedFieldType fieldType = Defaults.FIELD_TYPE.clone(); - boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1); - if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) { - fieldType.setTokenized(false); - } - return fieldType; - } - - public String path() { - return this.path; } @Override @@ -286,10 +239,10 @@ public class IdFieldMapper extends MetadataFieldMapper { } // else we are in the pre/post parse phase if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - fields.add(new Field(fieldType().names().indexName(), context.id(), fieldType())); + fields.add(new Field(fieldType().name(), context.id(), fieldType())); } if (fieldType().hasDocValues()) { - fields.add(new BinaryDocValuesField(fieldType().names().indexName(), new BytesRef(context.id()))); + fields.add(new BinaryDocValuesField(fieldType().name(), new BytesRef(context.id()))); } } @@ -300,38 +253,11 @@ public class IdFieldMapper extends MetadataFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (indexCreatedBefore2x == false) { - return builder; - } - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if all are defaults, no sense to write it at all - if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() - && fieldType().indexOptions() == Defaults.FIELD_TYPE.indexOptions() - && path == Defaults.PATH - && hasCustomFieldDataSettings() == false) { - return builder; - } - builder.startObject(CONTENT_TYPE); - if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) { - builder.field("store", fieldType().stored()); - } - if (includeDefaults || fieldType().indexOptions() != Defaults.FIELD_TYPE.indexOptions()) { - builder.field("index", indexTokenizeOptionToString(fieldType().indexOptions() != IndexOptions.NONE, fieldType().tokenized())); - } - if (includeDefaults || path != Defaults.PATH) { - builder.field("path", path); - } - - if (includeDefaults || hasCustomFieldDataSettings()) { - builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap()); - } - builder.endObject(); return builder; } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 962332b5c4b..d4aa2da4ab3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -24,9 +24,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; 
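/*
 * (Annotation, not part of the patch.) The removed org.elasticsearch.Version import
 * tells the same story in every mapper here: TypeParsers no longer branch on the
 * version the index was created with. The deleted branches looked roughly like this
 * sketch, not the exact code:
 *
 *   if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
 *       parseField(builder, builder.name, node, parserContext); // legacy 1.x options
 *   }
 *
 * With pre-2.0 indices out of scope, only the current-format parsing path survives.
 */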
-import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; @@ -34,18 +32,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; -import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; /** * @@ -68,7 +62,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze(); } @@ -80,7 +74,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @@ -100,23 +94,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - return builder; - } - - parseField(builder, builder.name, node, parserContext); - for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry<String, Object> entry = iterator.next(); - String fieldName = Strings.toUnderscoreCase(entry.getKey()); - Object fieldNode = entry.getValue(); - if (fieldName.equals("enabled")) { - EnabledAttributeMapper mapper = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; - builder.enabled(mapper); - iterator.remove(); - } - } - return builder; + return new Builder(parserContext.mapperService().fullName(NAME)); } @Override @@ -224,7 +202,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { } public String value(Document document) { - Field field = (Field) document.getField(fieldType().names().indexName()); + Field field = (Field) document.getField(fieldType().name()); return field == null ?
null : (String)fieldType().value(field); } @@ -248,7 +226,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { if (!enabledState.enabled) { return; } - fields.add(new Field(fieldType().names().indexName(), context.index(), fieldType())); + fields.add(new Field(fieldType().name(), context.index(), fieldType())); } @Override @@ -261,30 +239,22 @@ public class IndexFieldMapper extends MetadataFieldMapper { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); // if all defaults, no need to write it at all - if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED_STATE && hasCustomFieldDataSettings() == false) { + if (includeDefaults == false && enabledState == Defaults.ENABLED_STATE) { return builder; } builder.startObject(CONTENT_TYPE); - if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored())) { - builder.field("store", fieldType().stored()); - } if (includeDefaults || enabledState != Defaults.ENABLED_STATE) { builder.field("enabled", enabledState.enabled); } - if (indexCreatedBefore2x && (includeDefaults || hasCustomFieldDataSettings())) { - builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap()); - } builder.endObject(); return builder; } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { IndexFieldMapper indexFieldMapperMergeWith = (IndexFieldMapper) mergeWith; - if (!mergeResult.simulate()) { - if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) { - this.enabledState = indexFieldMapperMergeWith.enabledState; - } + if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) { + this.enabledState = indexFieldMapperMergeWith.enabledState; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 760259a1802..abb9178b875 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; @@ -76,7 +75,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze(); JOIN_FIELD_TYPE.setHasDocValues(true); @@ -98,7 +97,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { private final MappedFieldType childJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone(); public Builder(String documentType) { - super(Defaults.NAME, Defaults.FIELD_TYPE); + super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); this.indexName = name; this.documentType = documentType; builder = this; @@ -121,9 +120,9 @@ public 
class ParentFieldMapper extends MetadataFieldMapper { if (parentType == null) { throw new MapperParsingException("[_parent] field mapping must contain the [type] option"); } - parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(documentType))); + parentJoinFieldType.setName(joinField(documentType)); parentJoinFieldType.setFieldDataType(null); - childJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType))); + childJoinFieldType.setName(joinField(parentType)); return new ParentFieldMapper(fieldType, parentJoinFieldType, childJoinFieldType, parentType, context.indexSettings()); } } @@ -139,9 +138,6 @@ public class ParentFieldMapper extends MetadataFieldMapper { if (fieldName.equals("type")) { builder.type(fieldNode.toString()); iterator.remove(); - } else if (fieldName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - // ignore before 2.0, reject on and after 2.0 - iterator.remove(); } else if (fieldName.equals("fielddata")) { // Only take over `loading`, since that is the only option now that is configurable: Map<String, String> fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata")); @@ -243,7 +239,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { } } } - return new TermsQuery(names().indexName(), bValues); + return new TermsQuery(name(), bValues); } } @@ -270,7 +266,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { private static MappedFieldType joinFieldTypeForParentType(String parentType, Settings indexSettings) { MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone(); - parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType))); + parentJoinFieldType.setName(joinField(parentType)); parentJoinFieldType.freeze(); return parentJoinFieldType; } @@ -313,7 +309,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { // we are in the parsing of _parent phase String parentId = context.parser().text(); context.sourceToParse().parent(parentId); - fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType())); + fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType())); addJoinFieldIfNeeded(fields, childJoinFieldType, parentId); } else { // otherwise, we are running it post processing of the xcontent @@ -325,7 +321,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { throw new MapperParsingException("No parent id provided, not within the document, and not externally"); } // we did not add it in the parsing phase, add it now - fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType())); + fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType())); addJoinFieldIfNeeded(fields, childJoinFieldType, parentId); } else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) { throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]"); @@ -337,7 +333,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { private void addJoinFieldIfNeeded(List<Field> fields, MappedFieldType fieldType, String id) { if (fieldType.hasDocValues()) { - fields.add(new SortedDocValuesField(fieldType.names().indexName(), new
BytesRef(id))); + fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(id))); } } @@ -371,11 +367,11 @@ public class ParentFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - super.merge(mergeWith, mergeResult); + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith; if (Objects.equals(parentType, fieldMergeWith.parentType) == false) { - mergeResult.addConflict("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]"); + throw new IllegalArgumentException("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]"); } List<String> conflicts = new ArrayList<>(); @@ -383,13 +379,13 @@ public class ParentFieldMapper extends MetadataFieldMapper { parentJoinFieldType.checkCompatibility(fieldMergeWith.parentJoinFieldType, conflicts, true); // same here if (childJoinFieldType != null) { // TODO: this can be set to false when the old parent/child impl is removed, we can do eager global ordinals loading per type. - childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, mergeResult.updateAllTypes() == false); + childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, updateAllTypes == false); } - for (String conflict : conflicts) { - mergeResult.addConflict(conflict); + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Merge conflicts: " + conflicts); } - if (active() && mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { + if (active()) { childJoinFieldType = fieldMergeWith.childJoinFieldType.clone(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index 18d0645d2d5..ee06b51ecfc 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -31,7 +30,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -41,7 +39,6 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; /** * @@ -63,22 +60,19 @@ public class RoutingFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze();
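/*
 * (Annotation, not part of the patch.) The merge API change visible in the
 * ParentFieldMapper hunk above repeats across every mapper in this commit:
 * merge(Mapper, MergeResult) plus a separate simulate() pass becomes a single
 * doMerge(Mapper, boolean updateAllTypes) that fails fast. The new contract,
 * as a minimal sketch:
 *
 *   @Override
 *   protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
 *       List<String> conflicts = new ArrayList<>();
 *       // compare settings and collect conflict messages...
 *       if (conflicts.isEmpty() == false) {
 *           throw new IllegalArgumentException("Merge conflicts: " + conflicts);
 *       }
 *       // no simulate phase: apply the merged state directly
 *   }
 */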
} public static final boolean REQUIRED = false; - public static final String PATH = null; } public static class Builder extends MetadataFieldMapper.Builder { private boolean required = Defaults.REQUIRED; - private String path = Defaults.PATH; - public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); } public Builder required(boolean required) { @@ -86,14 +80,9 @@ public class RoutingFieldMapper extends MetadataFieldMapper { return builder; } - public Builder path(String path) { - this.path = path; - return builder; - } - @Override public RoutingFieldMapper build(BuilderContext context) { - return new RoutingFieldMapper(fieldType, required, path, context.indexSettings()); + return new RoutingFieldMapper(fieldType, required, context.indexSettings()); } } @@ -101,9 +90,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper { @Override public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - parseField(builder, builder.name, node, parserContext); - } for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry<String, Object> entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); @@ -111,9 +97,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper { if (fieldName.equals("required")) { builder.required(nodeBooleanValue(fieldNode)); iterator.remove(); - } else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.path(fieldNode.toString()); - iterator.remove(); } } return builder; @@ -155,16 +138,14 @@ public class RoutingFieldMapper extends MetadataFieldMapper { } private boolean required; - private final String path; private RoutingFieldMapper(Settings indexSettings, MappedFieldType existing) { - this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, Defaults.PATH, indexSettings); + this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, indexSettings); } - private RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, Settings indexSettings) { + private RoutingFieldMapper(MappedFieldType fieldType, boolean required, Settings indexSettings) { super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings); this.required = required; - this.path = path; } public void markAsRequired() { @@ -175,12 +156,8 @@ public class RoutingFieldMapper extends MetadataFieldMapper { return this.required; } - public String path() { - return this.path; - } - public String value(Document document) { - Field field = (Field) document.getField(fieldType().names().indexName()); + Field field = (Field) document.getField(fieldType().name()); return field == null ?
null : (String)fieldType().value(field); } @@ -206,11 +183,9 @@ public class RoutingFieldMapper extends MetadataFieldMapper { if (context.sourceToParse().routing() != null) { String routing = context.sourceToParse().routing(); if (routing != null) { - if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) { - context.ignoredValue(fieldType().names().indexName(), routing); - return; + if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { + fields.add(new Field(fieldType().name(), routing, fieldType())); } - fields.add(new Field(fieldType().names().indexName(), routing, fieldType())); } } } @@ -225,31 +200,19 @@ public class RoutingFieldMapper extends MetadataFieldMapper { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); // if all are defaults, no sense to write it at all - boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; - boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE; - if (!includeDefaults && indexed == indexedDefault && - fieldType().stored() == Defaults.FIELD_TYPE.stored() && required == Defaults.REQUIRED && path == Defaults.PATH) { + if (!includeDefaults && required == Defaults.REQUIRED) { return builder; } builder.startObject(CONTENT_TYPE); - if (indexCreatedBefore2x && (includeDefaults || indexed != indexedDefault)) { - builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized())); - } - if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored())) { - builder.field("store", fieldType().stored()); - } if (includeDefaults || required != Defaults.REQUIRED) { builder.field("required", required); } - if (indexCreatedBefore2x && (includeDefaults || path != Defaults.PATH)) { - builder.field("path", path); - } builder.endObject(); return builder; } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index f9bcb31b406..b0de09edafb 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -41,11 +41,11 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; @@ -74,7 +74,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze(); } @@ -88,7 +88,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { private String[] excludes = null; public Builder() { - super(Defaults.NAME, Defaults.FIELD_TYPE); + super(Defaults.NAME, 
Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); } public Builder enabled(boolean enabled) { @@ -272,7 +272,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { if (!source.hasArray()) { source = source.toBytesArray(); } - fields.add(new StoredField(fieldType().names().indexName(), source.array(), source.arrayOffset(), source.length())); + fields.add(new StoredField(fieldType().name(), source.array(), source.arrayOffset(), source.length())); } @Override @@ -310,18 +310,20 @@ public class SourceFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith; - if (mergeResult.simulate()) { - if (this.enabled != sourceMergeWith.enabled) { - mergeResult.addConflict("Cannot update enabled setting for [_source]"); - } - if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) { - mergeResult.addConflict("Cannot update includes setting for [_source]"); - } - if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) { - mergeResult.addConflict("Cannot update excludes setting for [_source]"); - } + List<String> conflicts = new ArrayList<>(); + if (this.enabled != sourceMergeWith.enabled) { + conflicts.add("Cannot update enabled setting for [_source]"); + } + if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) { + conflicts.add("Cannot update includes setting for [_source]"); + } + if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) { + conflicts.add("Cannot update excludes setting for [_source]"); + } + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Can't merge because of conflicts: " + conflicts); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 9a18befe622..4612b9fb85f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -32,7 +32,6 @@ import org.elasticsearch.index.analysis.NumericLongAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.SourceToParse; @@ -65,7 +64,7 @@ public class TTLFieldMapper extends MetadataFieldMapper { TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); TTL_FIELD_TYPE.setIndexAnalyzer(NumericLongAnalyzer.buildNamedAnalyzer(Defaults.PRECISION_STEP_64_BIT)); TTL_FIELD_TYPE.setSearchAnalyzer(NumericLongAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE)); - TTL_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + TTL_FIELD_TYPE.setName(NAME); TTL_FIELD_TYPE.freeze(); } @@ -79,7 +78,7 @@ public class TTLFieldMapper extends MetadataFieldMapper { private long defaultTTL = Defaults.DEFAULT; public Builder() { - super(Defaults.NAME, Defaults.TTL_FIELD_TYPE); + super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.FIELD_TYPE); } public Builder enabled(EnabledAttributeMapper enabled) { @@ -258,21 +257,19 @@ public class TTLFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult
mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith; - if (((TTLFieldMapper) mergeWith).enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with - if (this.enabledState == EnabledAttributeMapper.ENABLED && ((TTLFieldMapper) mergeWith).enabledState == EnabledAttributeMapper.DISABLED) { - mergeResult.addConflict("_ttl cannot be disabled once it was enabled."); + if (ttlMergeWith.enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with + if (this.enabledState == EnabledAttributeMapper.ENABLED && ttlMergeWith.enabledState == EnabledAttributeMapper.DISABLED) { + throw new IllegalArgumentException("_ttl cannot be disabled once it was enabled."); } else { - if (!mergeResult.simulate()) { - this.enabledState = ttlMergeWith.enabledState; - } + this.enabledState = ttlMergeWith.enabledState; } } if (ttlMergeWith.defaultTTL != -1) { // we never build the default when the field is disabled so we should also not set it // (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster) - if (!mergeResult.simulate() && (enabledState == EnabledAttributeMapper.ENABLED)) { + if (enabledState == EnabledAttributeMapper.ENABLED) { this.defaultTTL = ttlMergeWith.defaultTTL; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 468243d63cf..e750f973add 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; @@ -33,20 +32,19 @@ import org.elasticsearch.index.analysis.NumericDateAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; -import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.LongFieldMapper; import java.io.IOException; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; public class TimestampFieldMapper extends MetadataFieldMapper { @@ -58,49 +56,33 @@ public class TimestampFieldMapper extends MetadataFieldMapper { public static final String NAME = "_timestamp"; // TODO: this should be removed - public static final TimestampFieldType PRE_20_FIELD_TYPE; public static 
final TimestampFieldType FIELD_TYPE = new TimestampFieldType(); public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT); - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER_BEFORE_2_0 = Joda.forPattern("epoch_millis||dateOptionalTime"); static { FIELD_TYPE.setStored(true); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER); FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Defaults.PRECISION_STEP_64_BIT)); FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE)); FIELD_TYPE.setHasDocValues(true); FIELD_TYPE.freeze(); - PRE_20_FIELD_TYPE = FIELD_TYPE.clone(); - PRE_20_FIELD_TYPE.setStored(false); - PRE_20_FIELD_TYPE.setHasDocValues(false); - PRE_20_FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER_BEFORE_2_0); - PRE_20_FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER_BEFORE_2_0, Defaults.PRECISION_STEP_64_BIT)); - PRE_20_FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER_BEFORE_2_0, Integer.MAX_VALUE)); - PRE_20_FIELD_TYPE.freeze(); } public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_DISABLED; - public static final String PATH = null; public static final String DEFAULT_TIMESTAMP = "now"; } public static class Builder extends MetadataFieldMapper.Builder { private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; - private String path = Defaults.PATH; private String defaultTimestamp = Defaults.DEFAULT_TIMESTAMP; - private boolean explicitStore = false; private Boolean ignoreMissing = null; - public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); - if (existing != null) { - // if there is an existing type, always use that store value (only matters for < 2.0) - explicitStore = true; - } + public Builder(MappedFieldType existing, Settings settings) { + super(Defaults.NAME, existing == null ? 
Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); } @Override @@ -113,11 +95,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper { return builder; } - public Builder path(String path) { - this.path = path; - return builder; - } - public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { fieldType().setDateTimeFormatter(dateTimeFormatter); return this; @@ -135,42 +112,21 @@ public class TimestampFieldMapper extends MetadataFieldMapper { @Override public Builder store(boolean store) { - explicitStore = true; return super.store(store); } @Override public TimestampFieldMapper build(BuilderContext context) { - if (explicitStore == false && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) { - fieldType.setStored(false); - } - - if (fieldType().dateTimeFormatter().equals(Defaults.DATE_TIME_FORMATTER)) { - fieldType().setDateTimeFormatter(getDateTimeFormatter(context.indexSettings())); - } - setupFieldType(context); - return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, path, defaultTimestamp, + return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, defaultTimestamp, ignoreMissing, context.indexSettings()); } } - private static FormatDateTimeFormatter getDateTimeFormatter(Settings indexSettings) { - Version indexCreated = Version.indexCreated(indexSettings); - if (indexCreated.onOrAfter(Version.V_2_0_0_beta1)) { - return Defaults.DATE_TIME_FORMATTER; - } else { - return Defaults.DATE_TIME_FORMATTER_BEFORE_2_0; - } - } - public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - parseField(builder, builder.name, node, parserContext); - } + Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.mapperService().getIndexSettings().getSettings()); boolean defaultSet = false; Boolean ignoreMissing = null; for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) { @@ -181,23 +137,12 @@ public class TimestampFieldMapper extends MetadataFieldMapper { EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; builder.enabled(enabledState); iterator.remove(); - } else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.path(fieldNode.toString()); - iterator.remove(); } else if (fieldName.equals("format")) { builder.dateTimeFormatter(parseDateTimeFormatter(fieldNode.toString())); iterator.remove(); } else if (fieldName.equals("default")) { if (fieldNode == null) { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_4_0_Beta1) && - parserContext.indexVersionCreated().before(Version.V_1_5_0)) { - // We are reading an index created in 1.4 with feature #7036 - // `default: null` was explicitly set.
We need to change this index to - `ignore_missing: false` - builder.ignoreMissing(false); - } else { - throw new TimestampParsingException("default timestamp can not be set to null"); - } + throw new TimestampParsingException("default timestamp can not be set to null"); } else { builder.defaultTimestamp(fieldNode.toString()); defaultSet = true; @@ -246,28 +191,19 @@ public class TimestampFieldMapper extends MetadataFieldMapper { } } - private static MappedFieldType chooseFieldType(Settings settings, MappedFieldType existing) { - if (existing != null) { - return existing; - } - return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0_beta1) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE; - } - private EnabledAttributeMapper enabledState; - private final String path; private final String defaultTimestamp; private final Boolean ignoreMissing; private TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) { - this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings); + this(existing != null ? existing : Defaults.FIELD_TYPE, Defaults.FIELD_TYPE, Defaults.ENABLED, Defaults.DEFAULT_TIMESTAMP, null, indexSettings); } - private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String path, + private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String defaultTimestamp, Boolean ignoreMissing, Settings indexSettings) { super(NAME, fieldType, defaultFieldType, indexSettings); this.enabledState = enabledState; - this.path = path; this.defaultTimestamp = defaultTimestamp; this.ignoreMissing = ignoreMissing; } @@ -281,10 +217,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper { return this.enabledState.enabled; } - public String path() { - return this.path; - } - public String defaultTimestamp() { return this.defaultTimestamp; } @@ -312,14 +244,11 @@ public class TimestampFieldMapper extends MetadataFieldMapper { protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException { if (enabledState.enabled) { long timestamp = context.sourceToParse().timestamp(); - if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) { - context.ignoredValue(fieldType().names().indexName(), String.valueOf(timestamp)); - } if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { fields.add(new LongFieldMapper.CustomLongNumericField(timestamp, fieldType())); } if (fieldType().hasDocValues()) { - fields.add(new NumericDocValuesField(fieldType().names().indexName(), timestamp)); + fields.add(new NumericDocValuesField(fieldType().name(), timestamp)); } } } @@ -332,35 +261,19 @@ public class TimestampFieldMapper extends MetadataFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; - boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE; // if all are defaults, no sense to write it at all - if (!includeDefaults && indexed == indexedDefault && hasCustomFieldDataSettings() == false && - fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH + if
(!includeDefaults && enabledState == Defaults.ENABLED && fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format()) - && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp) - && defaultFieldType.hasDocValues() == fieldType().hasDocValues()) { + && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) { return builder; } builder.startObject(CONTENT_TYPE); if (includeDefaults || enabledState != Defaults.ENABLED) { builder.field("enabled", enabledState.enabled); } - if (indexCreatedBefore2x && (includeDefaults || (indexed != indexedDefault) || (fieldType().tokenized() != Defaults.FIELD_TYPE.tokenized()))) { - builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized())); - } - if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.PRE_20_FIELD_TYPE.stored())) { - builder.field("store", fieldType().stored()); - } - if (indexCreatedBefore2x) { - doXContentDocValues(builder, includeDefaults); - } - if (indexCreatedBefore2x && (includeDefaults || path != Defaults.PATH)) { - builder.field("path", path); - } // different format handling depending on index version - String defaultDateFormat = indexCreatedBefore2x ? Defaults.DATE_TIME_FORMATTER_BEFORE_2_0.format() : Defaults.DATE_TIME_FORMATTER.format(); + String defaultDateFormat = Defaults.DATE_TIME_FORMATTER.format(); if (includeDefaults || !fieldType().dateTimeFormatter().format().equals(defaultDateFormat)) { builder.field("format", fieldType().dateTimeFormatter().format()); } @@ -370,40 +283,31 @@ public class TimestampFieldMapper extends MetadataFieldMapper { if (includeDefaults || ignoreMissing != null) { builder.field("ignore_missing", ignoreMissing); } - if (indexCreatedBefore2x && (includeDefaults || hasCustomFieldDataSettings())) { - builder.field("fielddata", fieldType().fieldDataType().getSettings().getAsMap()); - } builder.endObject(); return builder; } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { TimestampFieldMapper timestampFieldMapperMergeWith = (TimestampFieldMapper) mergeWith; - super.merge(mergeWith, mergeResult); - if (!mergeResult.simulate()) { - if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) { - this.enabledState = timestampFieldMapperMergeWith.enabledState; - } - } else { - if (timestampFieldMapperMergeWith.defaultTimestamp() == null && defaultTimestamp == null) { - return; - } - if (defaultTimestamp == null) { - mergeResult.addConflict("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); - } else if (timestampFieldMapperMergeWith.defaultTimestamp() == null) { - mergeResult.addConflict("Cannot update default in _timestamp value. Value is \" + defaultTimestamp.toString() + \" now encountering null"); - } else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) { - mergeResult.addConflict("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); - } - if (this.path != null) { - if (path.equals(timestampFieldMapperMergeWith.path()) == false) { - mergeResult.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? 
"missing" : timestampFieldMapperMergeWith.path())); - } - } else if (timestampFieldMapperMergeWith.path() != null) { - mergeResult.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing"); - } + super.doMerge(mergeWith, updateAllTypes); + if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) { + this.enabledState = timestampFieldMapperMergeWith.enabledState; + } + if (timestampFieldMapperMergeWith.defaultTimestamp() == null && defaultTimestamp == null) { + return; + } + List conflicts = new ArrayList<>(); + if (defaultTimestamp == null) { + conflicts.add("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); + } else if (timestampFieldMapperMergeWith.defaultTimestamp() == null) { + conflicts.add("Cannot update default in _timestamp value. Value is \" + defaultTimestamp.toString() + \" now encountering null"); + } else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) { + conflicts.add("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); + } + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Conflicts: " + conflicts); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index d4acc3c5975..72defadf6fd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -30,8 +30,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -40,7 +38,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; @@ -50,8 +47,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; - /** * */ @@ -73,7 +68,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze(); } } @@ -81,13 +76,13 @@ public class TypeFieldMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? 
Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @Override public TypeFieldMapper build(BuilderContext context) { - fieldType.setNames(buildNames(context)); + fieldType.setName(buildFullName(context)); return new TypeFieldMapper(fieldType, context.indexSettings()); } } @@ -95,12 +90,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - throw new MapperParsingException(NAME + " is not configurable"); - } - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - parseField(builder, builder.name, node, parserContext); - return builder; + throw new MapperParsingException(NAME + " is not configurable"); } @Override @@ -189,9 +179,9 @@ public class TypeFieldMapper extends MetadataFieldMapper { if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) { return; } - fields.add(new Field(fieldType().names().indexName(), context.type(), fieldType())); + fields.add(new Field(fieldType().name(), context.type(), fieldType())); if (fieldType().hasDocValues()) { - fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(context.type()))); + fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(context.type()))); } } @@ -202,30 +192,11 @@ public class TypeFieldMapper extends MetadataFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (indexCreatedBefore2x == false) { - return builder; - } - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if all are defaults, no sense to write it at all - boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; - boolean defaultIndexed = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE; - if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && indexed == defaultIndexed) { - return builder; - } - builder.startObject(CONTENT_TYPE); - if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) { - builder.field("store", fieldType().stored()); - } - if (includeDefaults || indexed != defaultIndexed) { - builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized())); - } - builder.endObject(); return builder; } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index ef4c48e62e3..828651409b1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; 
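/*
 * (Annotation, not part of the patch.) _uid gets the same treatment as _id and
 * _type above: these internal fields stopped accepting mapping options in 2.0, so
 * instead of parsing legacy settings the TypeParser now rejects configuration
 * outright:
 *
 *   public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node,
 *           ParserContext parserContext) throws MapperParsingException {
 *       throw new MapperParsingException(NAME + " is not configurable");
 *   }
 */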
import org.elasticsearch.index.mapper.ParseContext.Document; @@ -67,7 +66,7 @@ public class UidFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze(); NESTED_FIELD_TYPE = FIELD_TYPE.clone(); @@ -79,14 +78,13 @@ public class UidFieldMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing); + super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @Override public UidFieldMapper build(BuilderContext context) { setupFieldType(context); - fieldType.setHasDocValues(context.indexCreatedVersion().before(Version.V_2_0_0_beta1)); return new UidFieldMapper(fieldType, defaultFieldType, context.indexSettings()); } } @@ -94,12 +92,7 @@ public class UidFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - throw new MapperParsingException(NAME + " is not configurable"); - } - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - parseField(builder, builder.name, node, parserContext); - return builder; + throw new MapperParsingException(NAME + " is not configurable"); } @Override @@ -194,7 +187,7 @@ public class UidFieldMapper extends MetadataFieldMapper { } public Term term(String uid) { - return new Term(fieldType().names().indexName(), fieldType().indexedValueForSearch(uid)); + return new Term(fieldType().name(), fieldType().indexedValueForSearch(uid)); } @Override @@ -204,28 +197,11 @@ public class UidFieldMapper extends MetadataFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (indexCreatedBefore2x == false) { - return builder; - } - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if defaults, don't output - if (!includeDefaults && hasCustomFieldDataSettings() == false) { - return builder; - } - - builder.startObject(CONTENT_TYPE); - - if (includeDefaults || hasCustomFieldDataSettings()) { - builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap()); - } - - builder.endObject(); return builder; } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index 292a622ab73..027b2ef05ff 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -22,21 +22,17 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DocValuesType; -import
org.elasticsearch.Version; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import java.io.IOException; -import java.util.Iterator; import java.util.List; import java.util.Map; @@ -52,7 +48,7 @@ public class VersionFieldMapper extends MetadataFieldMapper { public static final MappedFieldType FIELD_TYPE = new VersionFieldType(); static { - FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setName(NAME); FIELD_TYPE.setDocValuesType(DocValuesType.NUMERIC); FIELD_TYPE.setHasDocValues(true); FIELD_TYPE.freeze(); @@ -62,7 +58,7 @@ public class VersionFieldMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { public Builder() { - super(Defaults.NAME, Defaults.FIELD_TYPE); + super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); } @Override @@ -74,16 +70,7 @@ public class VersionFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(); - for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry<String, Object> entry = iterator.next(); - String fieldName = Strings.toUnderscoreCase(entry.getKey()); - if (fieldName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - // ignore in 1.x, reject in 2.x - iterator.remove(); - } - } - return builder; + throw new MapperParsingException(NAME + " is not configurable"); } @Override @@ -166,7 +153,7 @@ public class VersionFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // nothing to do } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index e57ceaf8ca8..9984463ffc0 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -55,7 +55,6 @@ import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.elasticsearch.index.mapper.MapperBuilders.ipField; @@ -122,8 +121,7 @@ public class IpFieldMapper extends NumberFieldMapper { setupFieldType(context); IpFieldMapper fieldMapper = new IpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - fieldMapper.includeInAll(includeInAll); - return fieldMapper; + return (IpFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override @@ -231,7 +229,7 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object
lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? null : parseValue(upperTerm), includeLower, includeUpper); @@ -246,7 +244,7 @@ public class IpFieldMapper extends NumberFieldMapper { } catch (IllegalArgumentException e) { iSim = fuzziness.asLong(); } - return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -289,7 +287,7 @@ public class IpFieldMapper extends NumberFieldMapper { return; } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().names().fullName(), ipAsString, fieldType().boost()); + context.allEntries().addText(fieldType().name(), ipAsString, fieldType().boost()); } final long value = ipToLong(ipAsString); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/DynamicTemplate.java b/core/src/main/java/org/elasticsearch/index/mapper/object/DynamicTemplate.java index c51264f3dba..58602f06dfa 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/DynamicTemplate.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/DynamicTemplate.java @@ -125,13 +125,13 @@ public class DynamicTemplate { } public boolean match(ContentPath path, String name, String dynamicType) { - if (pathMatch != null && !patternMatch(pathMatch, path.fullPathAsText(name))) { + if (pathMatch != null && !patternMatch(pathMatch, path.pathAsText(name))) { return false; } if (match != null && !patternMatch(match, name)) { return false; } - if (pathUnmatch != null && patternMatch(pathUnmatch, path.fullPathAsText(name))) { + if (pathUnmatch != null && patternMatch(pathUnmatch, path.pathAsText(name))) { return false; } if (unmatch != null && patternMatch(unmatch, name)) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index 88f89719050..9f3b503ab49 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -24,23 +24,33 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import 
java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.object; -import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType; /** * @@ -54,7 +64,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, public static final boolean ENABLED = true; public static final Nested NESTED = Nested.NO; public static final Dynamic DYNAMIC = null; // not set, inherited from root - public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL; } public static enum Dynamic { @@ -104,8 +113,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, protected Dynamic dynamic = Defaults.DYNAMIC; - protected ContentPath.Type pathType = Defaults.PATH_TYPE; - protected Boolean includeInAll; protected final List mappersBuilders = new ArrayList<>(); @@ -130,11 +137,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, return builder; } - public T pathType(ContentPath.Type pathType) { - this.pathType = pathType; - return builder; - } - public T includeInAll(boolean includeInAll) { this.includeInAll = includeInAll; return builder; @@ -147,8 +149,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, @Override public Y build(BuilderContext context) { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(pathType); context.path().add(name); Map mappers = new HashMap<>(); @@ -156,17 +156,16 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, Mapper mapper = builder.build(context); mappers.put(mapper.simpleName(), mapper); } - context.path().pathType(origPathType); context.path().remove(); - ObjectMapper objectMapper = createMapper(name, context.path().fullPathAsText(name), enabled, nested, dynamic, pathType, mappers, context.indexSettings()); - objectMapper.includeInAllIfNotSet(includeInAll); + ObjectMapper objectMapper = createMapper(name, context.path().pathAsText(name), enabled, nested, dynamic, mappers, context.indexSettings()); + objectMapper = objectMapper.includeInAllIfNotSet(includeInAll); return (Y) objectMapper; } - protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map mappers, @Nullable Settings settings) { - return new ObjectMapper(name, fullPath, enabled, nested, dynamic, pathType, mappers); + protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map mappers, @Nullable Settings settings) { + return new ObjectMapper(name, fullPath, enabled, nested, dynamic, mappers); } } @@ -179,7 +178,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, Map.Entry entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); - if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder) || parseObjectProperties(name, fieldName, fieldNode, parserContext, builder)) { + if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder)) { iterator.remove(); } } @@ -214,14 +213,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, return false; } - protected 
static boolean parseObjectProperties(String name, String fieldName, Object fieldNode, ParserContext parserContext, ObjectMapper.Builder builder) { - if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.pathType(parsePathType(name, fieldNode.toString())); - return true; - } - return false; - } - protected static void parseNested(String name, Map node, ObjectMapper.Builder builder) { boolean nested = false; boolean nestedIncludeInParent = false; @@ -326,19 +317,16 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, private volatile Dynamic dynamic; - private final ContentPath.Type pathType; - private Boolean includeInAll; private volatile CopyOnWriteHashMap mappers; - ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map mappers) { + ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map mappers) { super(name); this.fullPath = fullPath; this.enabled = enabled; this.nested = nested; this.dynamic = dynamic; - this.pathType = pathType; if (mappers == null) { this.mappers = new CopyOnWriteHashMap<>(); } else { @@ -380,50 +368,58 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, return this.enabled; } - public ContentPath.Type pathType() { - return pathType; - } - public Mapper getMapper(String field) { return mappers.get(field); } @Override - public void includeInAll(Boolean includeInAll) { + public ObjectMapper includeInAll(Boolean includeInAll) { if (includeInAll == null) { - return; + return this; } - this.includeInAll = includeInAll; + + ObjectMapper clone = clone(); + clone.includeInAll = includeInAll; // when called from outside, apply this on all the inner mappers - for (Mapper mapper : mappers.values()) { + for (Mapper mapper : clone.mappers.values()) { if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).includeInAll(includeInAll); + clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).includeInAll(includeInAll)); } } + return clone; } @Override - public void includeInAllIfNotSet(Boolean includeInAll) { - if (this.includeInAll == null) { - this.includeInAll = includeInAll; + public ObjectMapper includeInAllIfNotSet(Boolean includeInAll) { + if (includeInAll == null || this.includeInAll != null) { + return this; } + + ObjectMapper clone = clone(); + clone.includeInAll = includeInAll; // when called from outside, apply this on all the inner mappers - for (Mapper mapper : mappers.values()) { + for (Mapper mapper : clone.mappers.values()) { if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll); + clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll)); } } + return clone; } @Override - public void unsetIncludeInAll() { - includeInAll = null; + public ObjectMapper unsetIncludeInAll() { + if (includeInAll == null) { + return this; + } + ObjectMapper clone = clone(); + clone.includeInAll = null; // when called from outside, apply this on all the inner mappers for (Mapper mapper : mappers.values()) { if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll(); + clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll()); } } + return clone; } public Nested nested() { @@ -434,14 +430,9 @@ public class ObjectMapper extends Mapper implements 
AllFieldMapper.IncludeInAll, return this.nestedTypeFilter; } - /** - * Put a new mapper. - * NOTE: this method must be called under the current {@link DocumentMapper} - * lock if concurrent updates are expected. - */ - public void putMapper(Mapper mapper) { + protected void putMapper(Mapper mapper) { if (mapper instanceof AllFieldMapper.IncludeInAll) { - ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll); + mapper = ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll); } mappers = mappers.copyAndPut(mapper.simpleName(), mapper); } @@ -464,64 +455,65 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, } @Override - public void merge(final Mapper mergeWith, final MergeResult mergeResult) { + public ObjectMapper merge(Mapper mergeWith, boolean updateAllTypes) { if (!(mergeWith instanceof ObjectMapper)) { - mergeResult.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]"); - return; + throw new IllegalArgumentException("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]"); } ObjectMapper mergeWithObject = (ObjectMapper) mergeWith; + ObjectMapper merged = clone(); + merged.doMerge(mergeWithObject, updateAllTypes); + return merged; + } + protected void doMerge(final ObjectMapper mergeWith, boolean updateAllTypes) { if (nested().isNested()) { - if (!mergeWithObject.nested().isNested()) { - mergeResult.addConflict("object mapping [" + name() + "] can't be changed from nested to non-nested"); - return; + if (!mergeWith.nested().isNested()) { + throw new IllegalArgumentException("object mapping [" + name() + "] can't be changed from nested to non-nested"); } } else { - if (mergeWithObject.nested().isNested()) { - mergeResult.addConflict("object mapping [" + name() + "] can't be changed from non-nested to nested"); - return; + if (mergeWith.nested().isNested()) { + throw new IllegalArgumentException("object mapping [" + name() + "] can't be changed from non-nested to nested"); } } - if (!mergeResult.simulate()) { - if (mergeWithObject.dynamic != null) { - this.dynamic = mergeWithObject.dynamic; - } + if (mergeWith.dynamic != null) { + this.dynamic = mergeWith.dynamic; } - doMerge(mergeWithObject, mergeResult); - - List mappersToPut = new ArrayList<>(); - List newObjectMappers = new ArrayList<>(); - List newFieldMappers = new ArrayList<>(); - for (Mapper mapper : mergeWithObject) { - Mapper mergeWithMapper = mapper; + for (Mapper mergeWithMapper : mergeWith) { Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName()); + Mapper merged; if (mergeIntoMapper == null) { - // no mapping, simply add it if not simulating - if (!mergeResult.simulate()) { - mappersToPut.add(mergeWithMapper); - MapperUtils.collect(mergeWithMapper, newObjectMappers, newFieldMappers); - } - } else if (mergeIntoMapper instanceof MetadataFieldMapper == false) { + // no mapping, simply add it + merged = mergeWithMapper; + } else { // root mappers can only exist here for backcompat, and are merged in Mapping - mergeIntoMapper.merge(mergeWithMapper, mergeResult); + merged = mergeIntoMapper.merge(mergeWithMapper, updateAllTypes); } - } - if (!newFieldMappers.isEmpty()) { - mergeResult.addFieldMappers(newFieldMappers); - } - if (!newObjectMappers.isEmpty()) { - mergeResult.addObjectMappers(newObjectMappers); - } - // add the mappers only after the administration have been done, so it will not be visible to parser (which first try to read with no 
lock) - for (Mapper mapper : mappersToPut) { - putMapper(mapper); + putMapper(merged); } } - protected void doMerge(ObjectMapper mergeWith, MergeResult mergeResult) { - + @Override + public ObjectMapper updateFieldType(Map fullNameToFieldType) { + List updatedMappers = null; + for (Mapper mapper : this) { + Mapper updated = mapper.updateFieldType(fullNameToFieldType); + if (mapper != updated) { + if (updatedMappers == null) { + updatedMappers = new ArrayList<>(); + } + updatedMappers.add(updated); + } + } + if (updatedMappers == null) { + return this; + } + ObjectMapper updated = clone(); + for (Mapper updatedMapper : updatedMappers) { + updated.putMapper(updatedMapper); + } + return updated; } @Override @@ -549,9 +541,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, if (enabled != Defaults.ENABLED) { builder.field("enabled", enabled); } - if (pathType != Defaults.PATH_TYPE) { - builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); - } if (includeInAll != null) { builder.field("include_in_all", includeInAll); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index a0c989abd7d..64a60305b10 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -26,11 +26,21 @@ import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DateFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; @@ -95,7 +105,7 @@ public class RootObjectMapper extends ObjectMapper { @Override - protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map mappers, @Nullable Settings settings) { + protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map mappers, @Nullable Settings settings) { assert !nested.isNested(); FormatDateTimeFormatter[] dates = null; if (dynamicDateTimeFormatters == null) { @@ -106,7 +116,7 @@ public class RootObjectMapper extends ObjectMapper { } else { dates = dynamicDateTimeFormatters.toArray(new FormatDateTimeFormatter[dynamicDateTimeFormatters.size()]); } - return new RootObjectMapper(name, enabled, dynamic, pathType, mappers, + return new RootObjectMapper(name, enabled, dynamic, mappers, dates, dynamicTemplates.toArray(new DynamicTemplate[dynamicTemplates.size()]), dateDetection, numericDetection); @@ -196,9 +206,9 @@ public class RootObjectMapper extends ObjectMapper { private volatile DynamicTemplate 
dynamicTemplates[]; - RootObjectMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map mappers, + RootObjectMapper(String name, boolean enabled, Dynamic dynamic, Map mappers, FormatDateTimeFormatter[] dynamicDateTimeFormatters, DynamicTemplate dynamicTemplates[], boolean dateDetection, boolean numericDetection) { - super(name, name, enabled, Nested.NO, dynamic, pathType, mappers); + super(name, name, enabled, Nested.NO, dynamic, mappers); this.dynamicTemplates = dynamicTemplates; this.dynamicDateTimeFormatters = dynamicDateTimeFormatters; this.dateDetection = dateDetection; @@ -253,25 +263,34 @@ public class RootObjectMapper extends ObjectMapper { } @Override - protected void doMerge(ObjectMapper mergeWith, MergeResult mergeResult) { + public RootObjectMapper merge(Mapper mergeWith, boolean updateAllTypes) { + return (RootObjectMapper) super.merge(mergeWith, updateAllTypes); + } + + @Override + protected void doMerge(ObjectMapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; - if (!mergeResult.simulate()) { - // merge them - List mergedTemplates = new ArrayList<>(Arrays.asList(this.dynamicTemplates)); - for (DynamicTemplate template : mergeWithObject.dynamicTemplates) { - boolean replaced = false; - for (int i = 0; i < mergedTemplates.size(); i++) { - if (mergedTemplates.get(i).name().equals(template.name())) { - mergedTemplates.set(i, template); - replaced = true; - } - } - if (!replaced) { - mergedTemplates.add(template); + // merge them + List mergedTemplates = new ArrayList<>(Arrays.asList(this.dynamicTemplates)); + for (DynamicTemplate template : mergeWithObject.dynamicTemplates) { + boolean replaced = false; + for (int i = 0; i < mergedTemplates.size(); i++) { + if (mergedTemplates.get(i).name().equals(template.name())) { + mergedTemplates.set(i, template); + replaced = true; } } - this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]); + if (!replaced) { + mergedTemplates.add(template); + } } + this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]); + } + + @Override + public RootObjectMapper updateFieldType(Map fullNameToFieldType) { + return (RootObjectMapper) super.updateFieldType(fullNameToFieldType); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/merge/MergeStats.java b/core/src/main/java/org/elasticsearch/index/merge/MergeStats.java index bcacb3516da..a6f1d1c3291 100644 --- a/core/src/main/java/org/elasticsearch/index/merge/MergeStats.java +++ b/core/src/main/java/org/elasticsearch/index/merge/MergeStats.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.merge; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; diff --git a/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java b/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java index 26b52f773e9..c79c7d7da25 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java @@ -54,7 +54,7 @@ final class QueriesLoaderCollector extends SimpleCollector { QueriesLoaderCollector(PercolatorQueriesRegistry percolator, ESLogger logger, MapperService mapperService, IndexFieldDataService 
indexFieldDataService) { this.percolator = percolator; this.logger = logger; - final MappedFieldType uidMapper = mapperService.smartNameFieldType(UidFieldMapper.NAME); + final MappedFieldType uidMapper = mapperService.fullName(UidFieldMapper.NAME); this.uidFieldData = indexFieldDataService.getForField(uidMapper); } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index b8170a3195a..69ee2a81061 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -273,8 +273,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { return new MatchAllDocsQuery(); } final String minimumShouldMatch; - if (context.isFilter() && this.minimumShouldMatch == null) { - //will be applied for real only if there are should clauses + if (context.isFilter() && this.minimumShouldMatch == null && shouldClauses.size() > 0) { minimumShouldMatch = "1"; } else { minimumShouldMatch = this.minimumShouldMatch; diff --git a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java index c7349cca3e6..50346c2d36e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; import org.apache.lucene.queries.BoostingQuery; +import org.apache.lucene.search.Query; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java index 20d0b62b725..550ffe89882 100644 --- a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java @@ -235,7 +235,7 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder if (fieldNamesFieldType.isEnabled()) { final String f; if (fieldType != null) { - f = fieldType.names().indexName(); + f = fieldType.name(); } else { f = field; } diff --git a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java index e9258d7cfc1..9ce592ca4cf 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -87,7 +87,7 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder { String fieldName = null; ShapeRelation shapeRelation = null; SpatialStrategy strategy = null; - BytesReference shape = null; + ShapeBuilder shape = null; String id = null; String type = null; @@ -79,8 +77,7 @@ public class GeoShapeQueryParser implements QueryParser { currentFieldName = parser.currentName(); token = parser.nextToken(); if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) { - XContentBuilder builder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); - shape = builder.bytes(); + shape = ShapeBuilder.parse(parser); } else if 
(parseContext.parseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) { String strategyName = parser.text(); strategy = SpatialStrategy.fromString(strategyName); diff --git a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java index 1649d12f186..07e92a6dc16 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java @@ -297,7 +297,7 @@ public class GeohashCellQuery { while ((token = parser.nextToken()) != Token.END_OBJECT) { if (token == Token.FIELD_NAME) { - String field = parser.text(); + String field = parser.currentName(); if (parseContext.isDeprecatedSetting(field)) { // skip diff --git a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index 4f9574f2981..e4dc1bcff94 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -30,7 +30,12 @@ import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; /** * A query that will return only documents matching specific ids (and a type). diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 39612b7a856..df9dcb497d0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -810,7 +810,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder diff --git a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index f69ac8c0548..353dbd668ac 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -28,11 +28,16 @@ import org.apache.lucene.util.Bits; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.*; +import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.Script.ScriptField; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; +import java.util.Collections; import java.util.Objects; public class ScriptQueryBuilder extends AbstractQueryBuilder { @@ -80,7 +85,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder public ScriptQuery(Script script, ScriptService scriptService, SearchLookup searchLookup) { this.script = script; - this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH); + this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); } @Override @@ 
-161,4 +166,4 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder protected boolean doEquals(ScriptQueryBuilder other) { return Objects.equals(script, other.script); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java index 7627644e750..fcf1a12eba7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java @@ -23,7 +23,12 @@ import org.apache.lucene.analysis.CachingTokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import java.io.IOException; @@ -299,7 +304,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp // For further reasoning see // https://issues.apache.org/jira/browse/LUCENE-4021 return (Objects.equals(locale.toLanguageTag(), other.locale.toLanguageTag()) - && Objects.equals(lowercaseExpandedTerms, other.lowercaseExpandedTerms) + && Objects.equals(lowercaseExpandedTerms, other.lowercaseExpandedTerms) && Objects.equals(lenient, other.lenient) && Objects.equals(analyzeWildcard, other.analyzeWildcard)); } diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index 092f966d8d7..17240a22bc3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -294,7 +294,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder { MappedFieldType fieldType = context.fieldMapper(fieldName); String indexFieldName; if (fieldType != null) { - indexFieldName = fieldType.names().indexName(); + indexFieldName = fieldType.name(); } else { indexFieldName = fieldName; } diff --git a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index 7c3cc1c30a3..314bc6faeb2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -118,7 +118,7 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder ext @Override protected String getFieldName() { - return fieldData.getFieldNames().fullName(); + return fieldData.getFieldName(); } @Override @@ -446,7 +450,7 @@ public abstract class DecayFunctionBuilder ext @Override protected String getFieldName() { - return fieldData.getFieldNames().fullName(); + return fieldData.getFieldName(); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java index 1b4dbaea3e2..4075ae54dbb 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java @@ -20,13 +20,13 @@ package org.elasticsearch.index.query.functionscore; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java index d5c260f9616..766911bb747 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java @@ -30,7 +30,11 @@ import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.EmptyQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java index d738f3a259d..6822ab3e240 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java @@ -22,13 +22,18 @@ package org.elasticsearch.index.query.functionscore; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.xcontent.*; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; +import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.EmptyQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; import 
org.elasticsearch.index.query.functionscore.weight.WeightBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java index df76f14e8d8..52324b9654b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.query.functionscore; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java index c528c0007f2..e7ce9b90e2b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java @@ -19,13 +19,11 @@ package org.elasticsearch.index.query.functionscore; -import java.util.Map; - import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.XContentLocation; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionParser; import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionParser; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; @@ -74,11 +72,12 @@ public class ScoreFunctionParserMapper { return functionParsers.get(parserName); } - private static void addParser(ScoreFunctionParser scoreFunctionParser, Map> map, NamedWriteableRegistry namedWriteableRegistry) { + private static void addParser(ScoreFunctionParser scoreFunctionParser, Map> map, NamedWriteableRegistry namedWriteableRegistry) { for (String name : scoreFunctionParser.getNames()) { map.put(name, scoreFunctionParser); } - namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, scoreFunctionParser.getBuilderPrototype()); + @SuppressWarnings("unchecked") NamedWriteable sfb = scoreFunctionParser.getBuilderPrototype(); + namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, sfb); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java index 4a73d4b2bec..d686e78635b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java @@ -148,7 +148,7 @@ public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder()); } diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java index 
f68699ac2a2..6f92e411c00 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java @@ -123,7 +123,7 @@ public class GeoDistanceRangeQuery extends Query { } public String fieldName() { - return indexFieldData.getFieldNames().indexName(); + return indexFieldData.getFieldName(); } @Override @@ -144,14 +144,15 @@ public class GeoDistanceRangeQuery extends Query { public Scorer scorer(LeafReaderContext context) throws IOException { final DocIdSetIterator approximation; if (boundingBoxWeight != null) { - approximation = boundingBoxWeight.scorer(context); + Scorer s = boundingBoxWeight.scorer(context); + if (s == null) { + // if the approximation does not match anything, we're done + return null; + } + approximation = s.iterator(); } else { approximation = DocIdSetIterator.all(context.reader().maxDoc()); } - if (approximation == null) { - // if the approximation does not match anything, we're done - return null; - } final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues(); final TwoPhaseIterator twoPhaseIterator = new TwoPhaseIterator(approximation) { @Override @@ -197,7 +198,7 @@ public class GeoDistanceRangeQuery extends Query { if (Double.compare(filter.inclusiveUpperPoint, inclusiveUpperPoint) != 0) return false; if (Double.compare(filter.lat, lat) != 0) return false; if (Double.compare(filter.lon, lon) != 0) return false; - if (!indexFieldData.getFieldNames().indexName().equals(filter.indexFieldData.getFieldNames().indexName())) + if (!indexFieldData.getFieldName().equals(filter.indexFieldData.getFieldName())) return false; if (geoDistance != filter.geoDistance) return false; @@ -206,7 +207,7 @@ public class GeoDistanceRangeQuery extends Query { @Override public String toString(String field) { - return "GeoDistanceRangeQuery(" + indexFieldData.getFieldNames().indexName() + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")"; + return "GeoDistanceRangeQuery(" + indexFieldData.getFieldName() + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")"; } @Override @@ -222,7 +223,7 @@ public class GeoDistanceRangeQuery extends Query { temp = inclusiveUpperPoint != +0.0d ? Double.doubleToLongBits(inclusiveUpperPoint) : 0L; result = 31 * result + Long.hashCode(temp); result = 31 * result + (geoDistance != null ? 
geoDistance.hashCode() : 0); - result = 31 * result + indexFieldData.getFieldNames().indexName().hashCode(); + result = 31 * result + indexFieldData.getFieldName().hashCode(); return result; } diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java index 71e369cce0b..d62aa76efd9 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java @@ -51,7 +51,7 @@ public class GeoPolygonQuery extends Query { } public String fieldName() { - return indexFieldData.getFieldNames().indexName(); + return indexFieldData.getFieldName(); } @Override @@ -104,7 +104,7 @@ public class GeoPolygonQuery extends Query { @Override public String toString(String field) { StringBuilder sb = new StringBuilder("GeoPolygonQuery("); - sb.append(indexFieldData.getFieldNames().indexName()); + sb.append(indexFieldData.getFieldName()); sb.append(", ").append(Arrays.toString(points)).append(')'); return sb.toString(); } @@ -115,14 +115,14 @@ public class GeoPolygonQuery extends Query { return false; } GeoPolygonQuery that = (GeoPolygonQuery) obj; - return indexFieldData.getFieldNames().indexName().equals(that.indexFieldData.getFieldNames().indexName()) + return indexFieldData.getFieldName().equals(that.indexFieldData.getFieldName()) && Arrays.equals(points, that.points); } @Override public int hashCode() { int h = super.hashCode(); - h = 31 * h + indexFieldData.getFieldNames().indexName().hashCode(); + h = 31 * h + indexFieldData.getFieldName().hashCode(); h = 31 * h + Arrays.hashCode(points); return h; } diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java index a2e9e1b689d..2f2801a2abe 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java @@ -57,7 +57,7 @@ public class InMemoryGeoBoundingBoxQuery extends Query { } public String fieldName() { - return indexFieldData.getFieldNames().indexName(); + return indexFieldData.getFieldName(); } @Override @@ -79,7 +79,7 @@ public class InMemoryGeoBoundingBoxQuery extends Query { @Override public String toString(String field) { - return "GeoBoundingBoxFilter(" + indexFieldData.getFieldNames().indexName() + ", " + topLeft + ", " + bottomRight + ")"; + return "GeoBoundingBoxFilter(" + indexFieldData.getFieldName() + ", " + topLeft + ", " + bottomRight + ")"; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java index 13290f98920..436fc80d521 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy; /** @@ -33,7 +32,7 @@ public class IndexedGeoBoundingBoxQuery { public static Query create(GeoPoint topLeft, 
GeoPoint bottomRight, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType) { if (!fieldType.isLatLonEnabled()) { - throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.names().fullName() + "], can't use indexed filter on it"); + throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name() + "], can't use indexed filter on it"); } //checks to see if bounding box crosses 180 degrees if (topLeft.lon() > bottomRight.lon()) { diff --git a/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java index 108dab449a3..f193df5ef82 100644 --- a/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java @@ -24,10 +24,8 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.internal.SearchContext; -import java.io.IOException; import java.util.Locale; import java.util.concurrent.TimeUnit; diff --git a/core/src/main/java/org/elasticsearch/index/shard/IllegalIndexShardStateException.java b/core/src/main/java/org/elasticsearch/index/shard/IllegalIndexShardStateException.java index 31c235e09ec..626e72acf41 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IllegalIndexShardStateException.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IllegalIndexShardStateException.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.shard; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 049391e48de..ff9a1e71bab 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -19,19 +19,11 @@ package org.elasticsearch.index.shard; -import java.io.IOException; -import java.io.PrintStream; -import java.nio.channels.ClosedByInterruptException; -import java.nio.charset.StandardCharsets; -import java.util.*; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; - -import org.apache.lucene.index.*; +import org.apache.lucene.index.CheckIndex; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.Term; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.store.AlreadyClosedException; @@ -73,7 +65,16 @@ import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.cache.request.ShardRequestCache; import 
org.elasticsearch.index.codec.CodecService; -import org.elasticsearch.index.engine.*; +import org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineClosedException; +import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.engine.EngineException; +import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.engine.RefreshFailedEngineException; +import org.elasticsearch.index.engine.Segment; +import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.fielddata.FieldDataStats; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.ShardFieldData; @@ -82,7 +83,12 @@ import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.get.ShardGetService; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.indexing.ShardIndexingService; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; @@ -103,7 +109,6 @@ import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogStats; -import org.elasticsearch.index.translog.TranslogWriter; import org.elasticsearch.index.warmer.ShardIndexWarmerService; import org.elasticsearch.index.warmer.WarmerStats; import org.elasticsearch.indices.IndicesWarmer; @@ -116,6 +121,20 @@ import org.elasticsearch.search.suggest.completion.CompletionFieldStats; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.threadpool.ThreadPool; +import java.io.IOException; +import java.io.PrintStream; +import java.nio.channels.ClosedByInterruptException; +import java.nio.charset.StandardCharsets; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; public class IndexShard extends AbstractIndexShardComponent { @@ -155,7 +174,6 @@ public class IndexShard extends AbstractIndexShardComponent { private TimeValue refreshInterval; private volatile ScheduledFuture refreshScheduledFuture; - private volatile ScheduledFuture mergeScheduleFuture; protected volatile ShardRouting shardRouting; protected volatile IndexShardState state; protected final AtomicReference currentEngineReference = new AtomicReference<>(); @@ -170,18 +188,14 @@ public class IndexShard extends AbstractIndexShardComponent { private final ShardEventListener shardEventListener = new ShardEventListener(); private volatile boolean flushOnClose = true; - private volatile int flushThresholdOperations; private volatile 
ByteSizeValue flushThresholdSize; - private volatile boolean disableFlush; /** * Index setting to control if a flush is executed before engine is closed * This setting is realtime updateable. */ public static final String INDEX_FLUSH_ON_CLOSE = "index.flush_on_close"; - public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS = "index.translog.flush_threshold_ops"; public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE = "index.translog.flush_threshold_size"; - public static final String INDEX_TRANSLOG_DISABLE_FLUSH = "index.translog.disable_flush"; public static final String INDEX_REFRESH_INTERVAL = "index.refresh_interval"; private final ShardPath path; @@ -237,8 +251,8 @@ public class IndexShard extends AbstractIndexShardComponent { logger.debug("state: [CREATED]"); this.checkIndexOnStartup = settings.get("index.shard.check_on_startup", "false"); - this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, getFromSettings(logger, settings, Translog.Durabilty.REQUEST), - provider.getBigArrays(), threadPool); + this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, + provider.getBigArrays()); final QueryCachingPolicy cachingPolicy; // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis @@ -249,9 +263,7 @@ public class IndexShard extends AbstractIndexShardComponent { } this.engineConfig = newEngineConfig(translogConfig, cachingPolicy); - this.flushThresholdOperations = settings.getAsInt(INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, settings.getAsInt("index.translog.flush_threshold", Integer.MAX_VALUE)); this.flushThresholdSize = settings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)); - this.disableFlush = settings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false); this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId); this.indexingMemoryController = provider.getIndexingMemoryController(); this.provider = provider; @@ -788,8 +800,6 @@ public class IndexShard extends AbstractIndexShardComponent { if (state != IndexShardState.CLOSED) { FutureUtils.cancel(refreshScheduledFuture); refreshScheduledFuture = null; - FutureUtils.cancel(mergeScheduleFuture); - mergeScheduleFuture = null; } changeState(IndexShardState.CLOSED, reason); indexShardOperationCounter.decRef(); @@ -1065,7 +1075,8 @@ public class IndexShard extends AbstractIndexShardComponent { // we are the first primary, recover from the gateway // if its post api allocation, the index should exists assert shardRouting.primary() : "recover from store only makes sense if the shard is a primary shard"; - final boolean shouldExist = shardRouting.allocatedPostIndexCreate(); + boolean shouldExist = shardRouting.allocatedPostIndexCreate(idxSettings.getIndexMetaData()); + StoreRecovery storeRecovery = new StoreRecovery(shardId, logger); return storeRecovery.recoverFromStore(this, shouldExist, localNode); } @@ -1081,15 +1092,13 @@ public class IndexShard extends AbstractIndexShardComponent { * Otherwise false. 
*/ boolean shouldFlush() { - if (disableFlush == false) { - Engine engine = getEngineOrNull(); - if (engine != null) { - try { - Translog translog = engine.getTranslog(); - return translog.totalOperations() > flushThresholdOperations || translog.sizeInBytes() > flushThresholdSize.bytes(); - } catch (AlreadyClosedException | EngineClosedException ex) { - // that's fine we are already close - no need to flush - } + Engine engine = getEngineOrNull(); + if (engine != null) { + try { + Translog translog = engine.getTranslog(); + return translog.sizeInBytes() > flushThresholdSize.bytes(); + } catch (AlreadyClosedException | EngineClosedException ex) { + // that's fine we are already close - no need to flush } } return false; @@ -1101,21 +1110,11 @@ public class IndexShard extends AbstractIndexShardComponent { if (state() == IndexShardState.CLOSED) { // no need to update anything if we are closed return; } - int flushThresholdOperations = settings.getAsInt(INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, this.flushThresholdOperations); - if (flushThresholdOperations != this.flushThresholdOperations) { - logger.info("updating flush_threshold_ops from [{}] to [{}]", this.flushThresholdOperations, flushThresholdOperations); - this.flushThresholdOperations = flushThresholdOperations; - } ByteSizeValue flushThresholdSize = settings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, this.flushThresholdSize); if (!flushThresholdSize.equals(this.flushThresholdSize)) { logger.info("updating flush_threshold_size from [{}] to [{}]", this.flushThresholdSize, flushThresholdSize); this.flushThresholdSize = flushThresholdSize; } - boolean disableFlush = settings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, this.disableFlush); - if (disableFlush != this.disableFlush) { - logger.info("updating disable_flush from [{}] to [{}]", this.disableFlush, disableFlush); - this.disableFlush = disableFlush; - } final EngineConfig config = engineConfig; final boolean flushOnClose = settings.getAsBoolean(INDEX_FLUSH_ON_CLOSE, this.flushOnClose); @@ -1124,18 +1123,6 @@ public class IndexShard extends AbstractIndexShardComponent { this.flushOnClose = flushOnClose; } - TranslogWriter.Type type = TranslogWriter.Type.fromString(settings.get(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, translogConfig.getType().name())); - if (type != translogConfig.getType()) { - logger.info("updating type from [{}] to [{}]", translogConfig.getType(), type); - translogConfig.setType(type); - } - - final Translog.Durabilty durabilty = getFromSettings(logger, settings, translogConfig.getDurabilty()); - if (durabilty != translogConfig.getDurabilty()) { - logger.info("updating durability from [{}] to [{}]", translogConfig.getDurabilty(), durabilty); - translogConfig.setDurabilty(durabilty); - } - TimeValue refreshInterval = settings.getAsTime(INDEX_REFRESH_INTERVAL, this.refreshInterval); if (!refreshInterval.equals(this.refreshInterval)) { logger.info("updating refresh_interval from [{}] to [{}]", this.refreshInterval, refreshInterval); @@ -1159,13 +1146,6 @@ public class IndexShard extends AbstractIndexShardComponent { change = true; } - final boolean compoundOnFlush = settings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, config.isCompoundOnFlush()); - if (compoundOnFlush != config.isCompoundOnFlush()) { - logger.info("updating {} from [{}] to [{}]", EngineConfig.INDEX_COMPOUND_ON_FLUSH, config.isCompoundOnFlush(), compoundOnFlush); - config.setCompoundOnFlush(compoundOnFlush); - change = true; - } - final int maxThreadCount = 
settings.getAsInt(MergeSchedulerConfig.MAX_THREAD_COUNT, mergeSchedulerConfig.getMaxThreadCount()); if (maxThreadCount != mergeSchedulerConfig.getMaxThreadCount()) { logger.info("updating [{}] from [{}] to [{}]", MergeSchedulerConfig.MAX_THREAD_COUNT, mergeSchedulerConfig.getMaxMergeCount(), maxThreadCount); @@ -1556,18 +1536,8 @@ public class IndexShard extends AbstractIndexShardComponent { /** * Returns the current translog durability mode */ - public Translog.Durabilty getTranslogDurability() { - return translogConfig.getDurabilty(); - } - - private static Translog.Durabilty getFromSettings(ESLogger logger, Settings settings, Translog.Durabilty defaultValue) { - final String value = settings.get(TranslogConfig.INDEX_TRANSLOG_DURABILITY, defaultValue.name()); - try { - return Translog.Durabilty.valueOf(value.toUpperCase(Locale.ROOT)); - } catch (IllegalArgumentException ex) { - logger.warn("Can't apply {} illegal value: {} using {} instead, use one of: {}", TranslogConfig.INDEX_TRANSLOG_DURABILITY, value, defaultValue, Arrays.toString(Translog.Durabilty.values())); - return defaultValue; - } + public Translog.Durability getTranslogDurability() { + return indexSettings.getTranslogDurability(); } private final AtomicBoolean asyncFlushRunning = new AtomicBoolean(); diff --git a/core/src/main/java/org/elasticsearch/index/shard/MergeSchedulerConfig.java b/core/src/main/java/org/elasticsearch/index/shard/MergeSchedulerConfig.java index c329722a135..a90bf2d7d91 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/MergeSchedulerConfig.java +++ b/core/src/main/java/org/elasticsearch/index/shard/MergeSchedulerConfig.java @@ -29,19 +29,19 @@ import org.elasticsearch.index.IndexSettings; * merge operations once they are needed (according to the merge policy). Merges * run in separate threads, and when the maximum number of threads is reached, * further merges will wait until a merge thread becomes available. - * + * *

<p>The merge scheduler supports the following dynamic settings: - * + * * <ul> * <li> index.merge.scheduler.max_thread_count: - * + * * The maximum number of threads that may be merging at once. Defaults to * Math.max(1, Math.min(4, Runtime.getRuntime().availableProcessors() / 2)) * which works well for a good solid-state-disk (SSD). If your index is on * spinning platter drives instead, decrease this to 1. - * + * * <li> index.merge.scheduler.auto_throttle: - * + * * If this is true (the default), then the merge scheduler will rate-limit IO * (writes) for merges to an adaptive value depending on how many merges are * requested over time. An application with a low indexing rate that @@ -55,19 +55,16 @@ public final class MergeSchedulerConfig { public static final String MAX_THREAD_COUNT = "index.merge.scheduler.max_thread_count"; public static final String MAX_MERGE_COUNT = "index.merge.scheduler.max_merge_count"; public static final String AUTO_THROTTLE = "index.merge.scheduler.auto_throttle"; - public static final String NOTIFY_ON_MERGE_FAILURE = "index.merge.scheduler.notify_on_failure"; // why would we not wanna do this? private volatile boolean autoThrottle; private volatile int maxThreadCount; private volatile int maxMergeCount; - private final boolean notifyOnMergeFailure; public MergeSchedulerConfig(IndexSettings indexSettings) { final Settings settings = indexSettings.getSettings(); maxThreadCount = settings.getAsInt(MAX_THREAD_COUNT, Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(settings) / 2))); maxMergeCount = settings.getAsInt(MAX_MERGE_COUNT, maxThreadCount + 5); this.autoThrottle = settings.getAsBoolean(AUTO_THROTTLE, true); - notifyOnMergeFailure = settings.getAsBoolean(NOTIFY_ON_MERGE_FAILURE, true); } /** @@ -114,11 +111,4 @@ public final class MergeSchedulerConfig { public void setMaxMergeCount(int maxMergeCount) { this.maxMergeCount = maxMergeCount; } - - /** - * Returns true iff we fail the engine on a merge failure. Default is true - */ - public boolean isNotifyOnMergeFailure() { - return notifyOnMergeFailure; - } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java index 50a16fa1cee..2b0ed9e50dd 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.index.shard; -import java.io.IOException; - import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.IndexSettings; @@ -35,6 +33,8 @@ import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.TranslogStats; +import java.io.IOException; + /** * ShadowIndexShard extends {@link IndexShard} to add file synchronization * from the primary when a flush happens.
It also ensures that a replica being diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardStateMetaData.java b/core/src/main/java/org/elasticsearch/index/shard/ShardStateMetaData.java index 3cee89f84be..8ec675671ed 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShardStateMetaData.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShardStateMetaData.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.shard; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardUtils.java b/core/src/main/java/org/elasticsearch/index/shard/ShardUtils.java index 8860bd4274c..f02797e6b38 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShardUtils.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShardUtils.java @@ -19,7 +19,8 @@ package org.elasticsearch.index.shard; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.LeafReader; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.index.ElasticsearchLeafReader; diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index ac46f6725de..88df4ee8da7 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -24,7 +24,11 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MapperException; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.Mapping; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.translog.Translog; import java.io.IOException; @@ -110,7 +114,7 @@ public class TranslogRecoveryPerformer { if (currentUpdate == null) { recoveredTypes.put(type, update); } else { - MapperUtils.merge(currentUpdate, update); + currentUpdate = currentUpdate.merge(update, false); } } diff --git a/core/src/main/java/org/elasticsearch/index/similarity/AbstractSimilarityProvider.java b/core/src/main/java/org/elasticsearch/index/similarity/AbstractSimilarityProvider.java index 566016b3da1..fef43d6f5de 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/AbstractSimilarityProvider.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/AbstractSimilarityProvider.java @@ -19,7 +19,11 @@ package org.elasticsearch.index.similarity; -import org.apache.lucene.search.similarities.*; +import org.apache.lucene.search.similarities.Normalization; +import org.apache.lucene.search.similarities.NormalizationH1; +import org.apache.lucene.search.similarities.NormalizationH2; +import org.apache.lucene.search.similarities.NormalizationH3; +import 
org.apache.lucene.search.similarities.NormalizationZ; import org.elasticsearch.common.settings.Settings; /** diff --git a/core/src/main/java/org/elasticsearch/index/similarity/DefaultSimilarityProvider.java b/core/src/main/java/org/elasticsearch/index/similarity/ClassicSimilarityProvider.java similarity index 75% rename from core/src/main/java/org/elasticsearch/index/similarity/DefaultSimilarityProvider.java rename to core/src/main/java/org/elasticsearch/index/similarity/ClassicSimilarityProvider.java index 3acbd9821af..f9a6ff2f5fb 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/DefaultSimilarityProvider.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/ClassicSimilarityProvider.java @@ -19,23 +19,23 @@ package org.elasticsearch.index.similarity; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.elasticsearch.common.settings.Settings; /** - * {@link SimilarityProvider} for {@link DefaultSimilarity}. + * {@link SimilarityProvider} for {@link ClassicSimilarity}. *

<p> * Configuration options available: * <ul> * <li>discount_overlaps</li> * </ul> *
      - * @see DefaultSimilarity For more information about configuration + * @see ClassicSimilarity For more information about configuration */ -public class DefaultSimilarityProvider extends AbstractSimilarityProvider { +public class ClassicSimilarityProvider extends AbstractSimilarityProvider { - private final DefaultSimilarity similarity = new DefaultSimilarity(); + private final ClassicSimilarity similarity = new ClassicSimilarity(); - public DefaultSimilarityProvider(String name, Settings settings) { + public ClassicSimilarityProvider(String name, Settings settings) { super(name); boolean discountOverlaps = settings.getAsBoolean("discount_overlaps", true); this.similarity.setDiscountOverlaps(discountOverlaps); @@ -45,7 +45,7 @@ public class DefaultSimilarityProvider extends AbstractSimilarityProvider { * {@inheritDoc} */ @Override - public DefaultSimilarity get() { + public ClassicSimilarity get() { return similarity; } } diff --git a/core/src/main/java/org/elasticsearch/index/similarity/DFRSimilarityProvider.java b/core/src/main/java/org/elasticsearch/index/similarity/DFRSimilarityProvider.java index d5caa4aab98..782eed2e6a1 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/DFRSimilarityProvider.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/DFRSimilarityProvider.java @@ -33,8 +33,6 @@ import org.apache.lucene.search.similarities.BasicModelP; import org.apache.lucene.search.similarities.DFRSimilarity; import org.apache.lucene.search.similarities.Normalization; import org.apache.lucene.search.similarities.Similarity; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import java.util.HashMap; diff --git a/core/src/main/java/org/elasticsearch/index/similarity/LMJelinekMercerSimilarityProvider.java b/core/src/main/java/org/elasticsearch/index/similarity/LMJelinekMercerSimilarityProvider.java index 3d5a40fc153..cd10d297355 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/LMJelinekMercerSimilarityProvider.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/LMJelinekMercerSimilarityProvider.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.similarity; import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity; import org.apache.lucene.search.similarities.Similarity; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; /** diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityProvider.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityProvider.java index 6433181dd6d..6c8a17f99a5 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityProvider.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityProvider.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.similarity; import org.apache.lucene.search.similarities.Similarity; -import org.elasticsearch.common.settings.Settings; /** * Provider for {@link Similarity} instances diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 1d08683f47b..f564b0e91d1 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -35,7 +35,7 
@@ import java.util.function.BiFunction; public final class SimilarityService extends AbstractIndexComponent { - public final static String DEFAULT_SIMILARITY = "default"; + public final static String DEFAULT_SIMILARITY = "classic"; private final Similarity defaultSimilarity; private final Similarity baseSimilarity; private final Map<String, SimilarityProvider> similarities; @@ -44,9 +44,9 @@ public final class SimilarityService extends AbstractIndexComponent { static { Map<String, BiFunction<String, Settings, SimilarityProvider>> defaults = new HashMap<>(); Map<String, BiFunction<String, Settings, SimilarityProvider>> buildIn = new HashMap<>(); - defaults.put("default", DefaultSimilarityProvider::new); + defaults.put("classic", ClassicSimilarityProvider::new); defaults.put("BM25", BM25SimilarityProvider::new); - buildIn.put("default", DefaultSimilarityProvider::new); + buildIn.put("classic", ClassicSimilarityProvider::new); buildIn.put("BM25", BM25SimilarityProvider::new); buildIn.put("DFR", DFRSimilarityProvider::new); buildIn.put("IB", IBSimilarityProvider::new); @@ -129,7 +129,7 @@ public final class SimilarityService extends AbstractIndexComponent { @Override public Similarity get(String name) { - MappedFieldType fieldType = mapperService.smartNameFieldType(name); + MappedFieldType fieldType = mapperService.fullName(name); return (fieldType != null && fieldType.similarity() != null) ? fieldType.similarity().get() : defaultSimilarity; } } diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java b/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java index d335e03d9a9..51c8bcf5d7e 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java @@ -19,9 +19,6 @@ package org.elasticsearch.index.snapshots; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; - /** * Represent shard snapshot status */ diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index 674d1085660..f05f64fa19c 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -30,7 +30,6 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RateLimiter; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterService; diff --git a/core/src/main/java/org/elasticsearch/index/store/DirectoryUtils.java b/core/src/main/java/org/elasticsearch/index/store/DirectoryUtils.java index ffa69c76ec7..c99bc322708 100644 --- a/core/src/main/java/org/elasticsearch/index/store/DirectoryUtils.java +++ b/core/src/main/java/org/elasticsearch/index/store/DirectoryUtils.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FileSwitchDirectory; import org.apache.lucene.store.FilterDirectory; -import org.elasticsearch.common.Nullable; /** * Utils for working with {@link Directory} classes.
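
To make the similarity change above concrete: the service keeps a registry of named provider factories and resolves a per-field similarity, falling back to the index-wide default, which this commit renames from "default" to "classic". The following is an illustrative sketch only, not the Elasticsearch class itself; a plain map stands in for the MapperService.fullName(name) / MappedFieldType lookup the real service performs.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch -- mirrors the fallback logic of SimilarityService.get(String)
// in the hunk above, with simplified stand-in types.
final class SimilarityLookupSketch {
    interface Similarity {} // stand-in for org.apache.lucene.search.similarities.Similarity

    static final String DEFAULT_SIMILARITY = "classic"; // was "default" before this change

    private final Map<String, Similarity> perFieldSimilarity; // assumed precomputed from the mappings
    private final Similarity defaultSimilarity;               // the "classic" (ClassicSimilarity) provider's output

    SimilarityLookupSketch(Map<String, Similarity> perFieldSimilarity, Similarity defaultSimilarity) {
        this.perFieldSimilarity = new HashMap<>(perFieldSimilarity);
        this.defaultSimilarity = defaultSimilarity;
    }

    // The real service resolves the field's MappedFieldType and checks
    // fieldType.similarity(); a map lookup stands in for that here.
    Similarity get(String field) {
        Similarity configured = perFieldSimilarity.get(field);
        return configured != null ? configured : defaultSimilarity;
    }
}
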
@@ -79,6 +78,6 @@ public final class DirectoryUtils { return defaultValue; } } - + } diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index 60752dd774b..fcc18f8b678 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -19,7 +19,17 @@ package org.elasticsearch.index.store; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.FileSwitchDirectory; +import org.apache.lucene.store.LockFactory; +import org.apache.lucene.store.MMapDirectory; +import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.NativeFSLockFactory; +import org.apache.lucene.store.RateLimitedFSDirectory; +import org.apache.lucene.store.SimpleFSDirectory; +import org.apache.lucene.store.SimpleFSLockFactory; +import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.Constants; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index 1bd023abdb0..ed561876735 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -21,37 +21,36 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.StoreRateLimiting; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.node.settings.NodeSettingsService; /** * IndexStoreConfig encapsulates node / cluster level configuration for index level {@link IndexStore} instances. * For instance it maintains the node level rate limiter configuration: updates to the cluster that disable or enable - * {@value #INDICES_STORE_THROTTLE_TYPE} or {@value #INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC} are reflected immediately + * indices.store.throttle.type or indices.store.throttle.max_bytes_per_sec are reflected immediately * on all referencing {@link IndexStore} instances */ -public class IndexStoreConfig implements NodeSettingsService.Listener { +public class IndexStoreConfig{ /** * Configures the node / cluster level throttle type. See {@link StoreRateLimiting.Type}. */ - public static final String INDICES_STORE_THROTTLE_TYPE = "indices.store.throttle.type"; + public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, Setting.Scope.CLUSTER); /** * Configures the node / cluster level throttle intensity. 
The default is 10240 MB */ - public static final String INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC = "indices.store.throttle.max_bytes_per_sec"; - private volatile String rateLimitingType; + public static final Setting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.CLUSTER); + private volatile StoreRateLimiting.Type rateLimitingType; private volatile ByteSizeValue rateLimitingThrottle; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); private final ESLogger logger; public IndexStoreConfig(Settings settings) { logger = Loggers.getLogger(IndexStoreConfig.class, settings); // we don't limit by default (we default to CMS's auto throttle instead): - this.rateLimitingType = settings.get("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name()); + this.rateLimitingType = INDICES_STORE_THROTTLE_TYPE_SETTING.get(settings); rateLimiting.setType(rateLimitingType); - this.rateLimitingThrottle = settings.getAsBytesSize("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0)); + this.rateLimitingThrottle = INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.get(settings); rateLimiting.setMaxRate(rateLimitingThrottle); logger.debug("using indices.store.throttle.type [{}], with index.store.throttle.max_bytes_per_sec [{}]", rateLimitingType, rateLimitingThrottle); } @@ -63,22 +62,12 @@ public class IndexStoreConfig implements NodeSettingsService.Listener { return rateLimiting; } - @Override - public void onRefreshSettings(Settings settings) { - String rateLimitingType = settings.get(INDICES_STORE_THROTTLE_TYPE, this.rateLimitingType); - // try and parse the type - StoreRateLimiting.Type.fromString(rateLimitingType); - if (!rateLimitingType.equals(this.rateLimitingType)) { - logger.info("updating indices.store.throttle.type from [{}] to [{}]", this.rateLimitingType, rateLimitingType); - this.rateLimitingType = rateLimitingType; - this.rateLimiting.setType(rateLimitingType); - } + public void setRateLimitingType(StoreRateLimiting.Type rateLimitingType) { + this.rateLimitingType = rateLimitingType; + rateLimiting.setType(rateLimitingType); + } - ByteSizeValue rateLimitingThrottle = settings.getAsBytesSize(INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, this.rateLimitingThrottle); - if (!rateLimitingThrottle.equals(this.rateLimitingThrottle)) { - logger.info("updating indices.store.throttle.max_bytes_per_sec from [{}] to [{}], note, type is [{}]", this.rateLimitingThrottle, rateLimitingThrottle, this.rateLimitingType); - this.rateLimitingThrottle = rateLimitingThrottle; - this.rateLimiting.setMaxRate(rateLimitingThrottle); - } + public void setRateLimitingThrottle(ByteSizeValue rateLimitingThrottle) { + this.rateLimitingThrottle = rateLimitingThrottle; } } diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index 729e2b65b2a..c47770d9805 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -20,9 +20,31 @@ package org.elasticsearch.index.store; import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.index.*; -import org.apache.lucene.store.*; -import org.apache.lucene.util.*; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.IndexFormatTooNewException; +import 
org.apache.lucene.index.IndexFormatTooOldException; +import org.apache.lucene.index.IndexNotFoundException; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.BufferedChecksum; +import org.apache.lucene.store.ByteArrayDataInput; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FilterDirectory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.store.Lock; +import org.apache.lucene.store.SimpleFSDirectory; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.Version; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Strings; @@ -52,10 +74,20 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.ShardId; -import java.io.*; +import java.io.Closeable; +import java.io.EOFException; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.zip.Adler32; diff --git a/core/src/main/java/org/elasticsearch/index/store/StoreStats.java b/core/src/main/java/org/elasticsearch/index/store/StoreStats.java index 60db82b6562..0eb720456c5 100644 --- a/core/src/main/java/org/elasticsearch/index/store/StoreStats.java +++ b/core/src/main/java/org/elasticsearch/index/store/StoreStats.java @@ -27,10 +27,8 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.index.shard.ShardPath; import java.io.IOException; -import java.nio.file.Path; /** */ diff --git a/core/src/main/java/org/elasticsearch/index/store/VerifyingIndexOutput.java b/core/src/main/java/org/elasticsearch/index/store/VerifyingIndexOutput.java index 87976811926..a4d5039b119 100644 --- a/core/src/main/java/org/elasticsearch/index/store/VerifyingIndexOutput.java +++ b/core/src/main/java/org/elasticsearch/index/store/VerifyingIndexOutput.java @@ -19,12 +19,12 @@ package org.elasticsearch.index.store; -import java.io.IOException; - import org.apache.lucene.store.IndexOutput; import org.elasticsearch.common.lucene.store.FilterIndexOutput; -/** +import java.io.IOException; + +/** * abstract class for verifying what was written. 
* subclasses override {@link #writeByte(byte)} and {@link #writeBytes(byte[], int, int)} */ @@ -35,7 +35,7 @@ public abstract class VerifyingIndexOutput extends FilterIndexOutput { VerifyingIndexOutput(IndexOutput out) { super("VerifyingIndexOutput(out=" + out.toString() + ")", out); } - + /** * Verifies the checksum and compares the written length with the expected file length. This method should be * called after all data has been written to this output. diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 5b27d327806..17777756056 100644 --- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -20,7 +20,12 @@ package org.elasticsearch.index.termvectors; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.MultiFields; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; import org.apache.lucene.index.memory.MemoryIndex; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.termvectors.TermVectorsFilter; @@ -33,21 +38,31 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.get.GetField; import org.elasticsearch.index.get.GetResult; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.search.dfs.AggregatedDfs; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; import static org.elasticsearch.index.mapper.SourceToParse.source; @@ -173,7 +188,7 @@ public class TermVectorsService { /* only keep valid fields */ Set validFields = new HashSet<>(); for (String field : selectedFields) { - MappedFieldType fieldType = indexShard.mapperService().smartNameFieldType(field); + MappedFieldType fieldType = indexShard.mapperService().fullName(field); if (!isValidField(fieldType)) { continue; } @@ -208,7 +223,7 @@ public class TermVectorsService { if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) { analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field).toString()); } else { - analyzer = 
mapperService.smartNameFieldType(field).indexAnalyzer(); + analyzer = mapperService.fullName(field).indexAnalyzer(); } if (analyzer == null) { analyzer = mapperService.analysisService().defaultIndexAnalyzer(); @@ -254,7 +269,7 @@ public class TermVectorsService { Set seenFields = new HashSet<>(); Collection getFields = new HashSet<>(); for (IndexableField field : doc.getFields()) { - MappedFieldType fieldType = indexShard.mapperService().smartNameFieldType(field.name()); + MappedFieldType fieldType = indexShard.mapperService().fullName(field.name()); if (!isValidField(fieldType)) { continue; } diff --git a/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java deleted file mode 100644 index 2ea33127633..00000000000 --- a/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.translog; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Channels; -import org.elasticsearch.common.util.concurrent.ReleasableLock; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; -import java.io.OutputStream; -import java.nio.ByteBuffer; - -/** - */ -public final class BufferingTranslogWriter extends TranslogWriter { - private byte[] buffer; - private int bufferCount; - private WrapperOutputStream bufferOs = new WrapperOutputStream(); - - /* the total offset of this file including the bytes written to the file as well as into the buffer */ - private volatile long totalOffset; - - public BufferingTranslogWriter(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException { - super(shardId, generation, channelReference); - this.buffer = new byte[bufferSize]; - this.totalOffset = writtenOffset; - } - - @Override - public Translog.Location add(BytesReference data) throws IOException { - try (ReleasableLock lock = writeLock.acquire()) { - ensureOpen(); - final long offset = totalOffset; - if (data.length() >= buffer.length) { - flush(); - // we use the channel to write, since on windows, writing to the RAF might not be reflected - // when reading through the channel - try { - data.writeTo(channel); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - writtenOffset += data.length(); - totalOffset += data.length(); - } else { - if (data.length() > buffer.length - bufferCount) { - flush(); - } - data.writeTo(bufferOs); - totalOffset += data.length(); - } - operationCounter++; - return new Translog.Location(generation, offset, data.length()); - } - } - - protected final void flush() throws IOException { - 
assert writeLock.isHeldByCurrentThread(); - if (bufferCount > 0) { - ensureOpen(); - // we use the channel to write, since on windows, writing to the RAF might not be reflected - // when reading through the channel - final int bufferSize = bufferCount; - try { - Channels.writeToChannel(buffer, 0, bufferSize, channel); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - writtenOffset += bufferSize; - bufferCount = 0; - } - } - - @Override - protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { - try (ReleasableLock lock = readLock.acquire()) { - if (position >= writtenOffset) { - assert targetBuffer.hasArray() : "buffer must have array"; - final int sourcePosition = (int) (position - writtenOffset); - System.arraycopy(buffer, sourcePosition, - targetBuffer.array(), targetBuffer.position(), targetBuffer.limit()); - targetBuffer.position(targetBuffer.limit()); - return; - } - } - // we don't have to have a read lock here because we only write ahead to the file, so all writes has been complete - // for the requested location. - Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); - } - - @Override - public boolean syncNeeded() { - return totalOffset != lastSyncedOffset; - } - - @Override - public synchronized void sync() throws IOException { - if (syncNeeded()) { - ensureOpen(); // this call gives a better exception that the incRef if we are closed by a tragic event - channelReference.incRef(); - try { - final long offsetToSync; - final int opsCounter; - try (ReleasableLock lock = writeLock.acquire()) { - flush(); - offsetToSync = totalOffset; - opsCounter = operationCounter; - } - // we can do this outside of the write lock but we have to protect from - // concurrent syncs - ensureOpen(); // just for kicks - the checkpoint happens or not either way - try { - checkpoint(offsetToSync, opsCounter, channelReference); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - lastSyncedOffset = offsetToSync; - } finally { - channelReference.decRef(); - } - } - } - - class WrapperOutputStream extends OutputStream { - - @Override - public void write(int b) throws IOException { - buffer[bufferCount++] = (byte) b; - } - - @Override - public void write(byte[] b, int off, int len) throws IOException { - // we do safety checked when we decide to use this stream... 
- System.arraycopy(b, off, buffer, bufferCount, len); - bufferCount += len; - } - } - - @Override - public long sizeInBytes() { - return totalOffset; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java b/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java index 30424222c60..b3f60a4c89f 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java +++ b/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import java.io.IOException; import java.nio.channels.FileChannel; -import java.nio.file.*; +import java.nio.file.Path; final class ChannelReference extends AbstractRefCounted { private final Path file; diff --git a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java index d1cd3b1efdb..463c5998f1d 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.translog; -import org.elasticsearch.common.io.stream.StreamInput; - import java.io.IOException; /** diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 9ad8715ed0c..3f8f0ab54df 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -47,14 +47,21 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShardComponent; -import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.EOFException; import java.io.IOException; import java.nio.channels.FileChannel; -import java.nio.file.*; -import java.util.*; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Set; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReadWriteLock; @@ -152,19 +159,27 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC writeLock = new ReleasableLock(rwl.writeLock()); this.location = config.getTranslogPath(); Files.createDirectories(this.location); - if (config.getSyncInterval().millis() > 0 && config.getThreadPool() != null) { - syncScheduler = config.getThreadPool().schedule(config.getSyncInterval(), ThreadPool.Names.SAME, new Sync()); - } try { if (translogGeneration != null) { - final Checkpoint checkpoint = Checkpoint.read(location.resolve(CHECKPOINT_FILE_NAME)); + final Checkpoint checkpoint = readCheckpoint(); this.recoveredTranslogs = recoverFromFiles(translogGeneration, checkpoint); if (recoveredTranslogs.isEmpty()) { throw new IllegalStateException("at least one reader must be recovered"); } - current = createWriter(checkpoint.generation + 1); - this.lastCommittedTranslogFileGeneration = translogGeneration.translogFileGeneration; + boolean success = false; + try { + 
current = createWriter(checkpoint.generation + 1); + this.lastCommittedTranslogFileGeneration = translogGeneration.translogFileGeneration; + success = true; + } finally { + // we have to close all the recovered ones otherwise we leak file handles here + // for instance if we have a lot of tlog and we can't create the writer we keep on holding + // on to all the uncommitted tlog files if we don't close + if (success == false) { + IOUtils.closeWhileHandlingException(recoveredTranslogs); + } + } } else { this.recoveredTranslogs = Collections.emptyList(); IOUtils.rm(location); @@ -352,7 +367,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC TranslogWriter createWriter(long fileGeneration) throws IOException { TranslogWriter newFile; try { - newFile = TranslogWriter.create(config.getType(), shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), config.getBufferSizeBytes(), getChannelFactory()); + newFile = TranslogWriter.create(shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), getChannelFactory(), config.getBufferSize()); } catch (IOException e) { throw new TranslogException(shardId, "failed to create new translog file", e); } @@ -414,15 +429,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return location; } } catch (AlreadyClosedException | IOException ex) { - if (current.getTragicException() != null) { - try { - close(); - } catch (Exception inner) { - ex.addSuppressed(inner); - } - } + closeOnTragicEvent(ex); throw ex; } catch (Throwable e) { + closeOnTragicEvent(e); throw new TranslogException(shardId, "Failed to write operation [" + operation + "]", e); } finally { Releasables.close(out.bytes()); @@ -499,14 +509,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (closed.get() == false) { current.sync(); } - } catch (AlreadyClosedException | IOException ex) { - if (current.getTragicException() != null) { - try { - close(); - } catch (Exception inner) { - ex.addSuppressed(inner); - } - } + } catch (Throwable ex) { + closeOnTragicEvent(ex); throw ex; } } @@ -538,10 +542,23 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC ensureOpen(); return current.syncUpTo(location.translogLocation + location.size); } + } catch (Throwable ex) { + closeOnTragicEvent(ex); + throw ex; } return false; } + private void closeOnTragicEvent(Throwable ex) { + if (current.getTragicException() != null) { + try { + close(); + } catch (Exception inner) { + ex.addSuppressed(inner); + } + } + } + /** * return stats */ @@ -705,34 +722,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } - class Sync implements Runnable { - @Override - public void run() { - // don't re-schedule if its closed..., we are done - if (closed.get()) { - return; - } - final ThreadPool threadPool = config.getThreadPool(); - if (syncNeeded()) { - threadPool.executor(ThreadPool.Names.FLUSH).execute(new Runnable() { - @Override - public void run() { - try { - sync(); - } catch (Exception e) { - logger.warn("failed to sync translog", e); - } - if (closed.get() == false) { - syncScheduler = threadPool.schedule(config.getSyncInterval(), ThreadPool.Names.SAME, Sync.this); - } - } - }); - } else { - syncScheduler = threadPool.schedule(config.getSyncInterval(), ThreadPool.Names.SAME, Sync.this); - } - } - } - public static class Location implements Accountable, 
Comparable { public final long generation; @@ -1178,7 +1167,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } - public enum Durabilty { + public enum Durability { /** * Async durability - translogs are synced based on a time interval. */ @@ -1426,4 +1415,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return current.getTragicException(); } + /** Reads and returns the current checkpoint */ + final Checkpoint readCheckpoint() throws IOException { + return Checkpoint.read(location.resolve(CHECKPOINT_FILE_NAME)); + } + } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java index e8a8d1803ee..682c3101027 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java @@ -20,13 +20,13 @@ package org.elasticsearch.index.translog; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog.TranslogGeneration; -import org.elasticsearch.indices.memory.IndexingMemoryController; import org.elasticsearch.threadpool.ThreadPool; import java.nio.file.Path; @@ -38,105 +38,38 @@ import java.nio.file.Path; */ public final class TranslogConfig { - public static final String INDEX_TRANSLOG_DURABILITY = "index.translog.durability"; - public static final String INDEX_TRANSLOG_FS_TYPE = "index.translog.fs.type"; - public static final String INDEX_TRANSLOG_SYNC_INTERVAL = "index.translog.sync_interval"; - - private final TimeValue syncInterval; + public static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(8, ByteSizeUnit.KB); private final BigArrays bigArrays; - private final ThreadPool threadPool; - private final boolean syncOnEachOperation; - private final int bufferSizeBytes; private volatile TranslogGeneration translogGeneration; - private volatile Translog.Durabilty durabilty = Translog.Durabilty.REQUEST; - private volatile TranslogWriter.Type type; private final IndexSettings indexSettings; private final ShardId shardId; private final Path translogPath; + private final ByteSizeValue bufferSize; /** * Creates a new TranslogConfig instance * @param shardId the shard ID this translog belongs to * @param translogPath the path to use for the transaction log files * @param indexSettings the index settings used to set internal variables - * @param durabilty the default durability setting for the translog * @param bigArrays a bigArrays instance used for temporarily allocating write operations - * @param threadPool a {@link ThreadPool} to schedule async sync durability */ - public TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, Translog.Durabilty durabilty, BigArrays bigArrays, @Nullable ThreadPool threadPool) { + public TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, BigArrays bigArrays) { + this(shardId, translogPath, indexSettings, bigArrays, DEFAULT_BUFFER_SIZE); + } + + TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, BigArrays bigArrays, ByteSizeValue bufferSize) { + this.bufferSize = bufferSize; 
this.indexSettings = indexSettings; this.shardId = shardId; this.translogPath = translogPath; - this.durabilty = durabilty; - this.threadPool = threadPool; this.bigArrays = bigArrays; - this.type = TranslogWriter.Type.fromString(indexSettings.getSettings().get(INDEX_TRANSLOG_FS_TYPE, TranslogWriter.Type.BUFFERED.name())); - this.bufferSizeBytes = (int) IndexingMemoryController.SHARD_TRANSLOG_BUFFER.bytes(); - - syncInterval = indexSettings.getSettings().getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5)); - if (syncInterval.millis() > 0 && threadPool != null) { - syncOnEachOperation = false; - } else if (syncInterval.millis() == 0) { - syncOnEachOperation = true; - } else { - syncOnEachOperation = false; - } - } - - /** - * Returns a {@link ThreadPool} to schedule async durability operations - */ - public ThreadPool getThreadPool() { - return threadPool; - } - - /** - * Returns the current durability mode of this translog. - */ - public Translog.Durabilty getDurabilty() { - return durabilty; - } - - /** - * Sets the current durability mode for the translog. - */ - public void setDurabilty(Translog.Durabilty durabilty) { - this.durabilty = durabilty; - } - - /** - * Returns the translog type - */ - public TranslogWriter.Type getType() { - return type; - } - - /** - * Sets the TranslogType for this Translog. The change will affect all subsequent translog files. - */ - public void setType(TranslogWriter.Type type) { - this.type = type; } /** * Returns true iff each low level operation shoudl be fsynced */ public boolean isSyncOnEachOperation() { - return syncOnEachOperation; - } - - /** - * Returns the current translog buffer size. - */ - public int getBufferSizeBytes() { - return bufferSizeBytes; - } - - /** - * Returns the current async fsync interval - */ - public TimeValue getSyncInterval() { - return syncInterval; + return indexSettings.getTranslogSyncInterval().millis() == 0; } /** @@ -184,4 +117,11 @@ public final class TranslogConfig { public void setTranslogGeneration(TranslogGeneration translogGeneration) { this.translogGeneration = translogGeneration; } + + /** + * The translog buffer size. 
Default is 8kb + */ + public ByteSizeValue getBufferSize() { + return bufferSize; + } } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index d7077fd90ad..71dff6ec36e 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -138,7 +138,7 @@ public abstract class TranslogReader implements Closeable, Comparable<TranslogReader> { ... diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java ... - public static TranslogWriter create(Type type, ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback<ChannelReference> onClose, int bufferSize, ChannelFactory channelFactory) throws IOException { + public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback<ChannelReference> onClose, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { final BytesRef ref = new BytesRef(translogUUID); final int headerLength = CodecUtil.headerLength(TRANSLOG_CODEC) + ref.length + RamUsageEstimator.NUM_BYTES_INT; final FileChannel channel = channelFactory.open(file); @@ -82,7 +82,7 @@ public class TranslogWriter extends TranslogReader { out.writeBytes(ref.bytes, ref.offset, ref.length); channel.force(false); writeCheckpoint(headerLength, 0, file.getParent(), fileGeneration, StandardOpenOption.WRITE); - final TranslogWriter writer = type.create(shardId, fileGeneration, new ChannelReference(file, fileGeneration, channel, onClose), bufferSize); + final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, new ChannelReference(file, fileGeneration, channel, onClose), bufferSize); return writer; } catch (Throwable throwable) { IOUtils.closeWhileHandlingException(channel); @@ -101,80 +101,57 @@ public class TranslogWriter extends TranslogReader { return tragedy; } - public enum Type { - - SIMPLE() { - @Override - public TranslogWriter create(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException { - return new TranslogWriter(shardId, generation, channelReference); - } - }, - BUFFERED() { - @Override - public TranslogWriter create(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException { - return new BufferingTranslogWriter(shardId, generation, channelReference, bufferSize); - } - }; - - public abstract TranslogWriter create(ShardId shardId, long generation, ChannelReference raf, int bufferSize) throws IOException; - - public static Type fromString(String type) { - if (SIMPLE.name().equalsIgnoreCase(type)) { - return SIMPLE; - } else if (BUFFERED.name().equalsIgnoreCase(type)) { - return BUFFERED; - } - throw new IllegalArgumentException("No translog fs type [" + type + "]"); - } - } - - protected final void closeWithTragicEvent(Throwable throwable) throws IOException { - try (ReleasableLock lock = writeLock.acquire()) { - if (tragedy == null) { - tragedy = throwable; - } else { - tragedy.addSuppressed(throwable); - } - close(); + private synchronized final void closeWithTragicEvent(Throwable throwable) throws IOException { + assert throwable != null : "throwable must not be null in a tragic event"; + if (tragedy == null) { + tragedy = throwable; + } else { + tragedy.addSuppressed(throwable); } + close(); } /** * add the given bytes to the translog and return the location they were written at */ - public Translog.Location add(BytesReference data) throws IOException { - final long position; - try (ReleasableLock lock = writeLock.acquire()) { - ensureOpen(); - position = writtenOffset; - try { - data.writeTo(channel); - } catch (Throwable e) {
-            closeWithTragicEvent(e);
-            throw e;
-        }
-        writtenOffset = writtenOffset + data.length();
-        operationCounter++;;
+    public synchronized Translog.Location add(BytesReference data) throws IOException {
+        ensureOpen();
+        final long offset = totalOffset;
+        try {
+            data.writeTo(outputStream);
+        } catch (Throwable ex) {
+            closeWithTragicEvent(ex);
+            throw ex;
         }
-        return new Translog.Location(generation, position, data.length());
-    }
-
-    /**
-     * change the size of the internal buffer if relevant
-     */
-    public void updateBufferSize(int bufferSize) throws TranslogException {
+        totalOffset += data.length();
+        operationCounter++;
+        return new Translog.Location(generation, offset, data.length());
     }

     /**
      * write all buffered ops to disk and fsync file
      */
-    public synchronized void sync() throws IOException { // synchronized to ensure only one sync happens a time
-        // check if we really need to sync here...
+    public void sync() throws IOException {
         if (syncNeeded()) {
-            try (ReleasableLock lock = writeLock.acquire()) {
-                ensureOpen();
-                checkpoint(writtenOffset, operationCounter, channelReference);
-                lastSyncedOffset = writtenOffset;
+            synchronized (this) {
+                ensureOpen(); // this call gives a better exception than the incRef if we are closed by a tragic event
+                channelReference.incRef();
+                try {
+                    final long offsetToSync;
+                    final int opsCounter;
+                    outputStream.flush();
+                    offsetToSync = totalOffset;
+                    opsCounter = operationCounter;
+                    try {
+                        checkpoint(offsetToSync, opsCounter, channelReference);
+                    } catch (Throwable ex) {
+                        closeWithTragicEvent(ex);
+                        throw ex;
+                    }
+                    lastSyncedOffset = offsetToSync;
+                } finally {
+                    channelReference.decRef();
+                }
             }
         }
     }
@@ -182,9 +159,7 @@ public class TranslogWriter extends TranslogReader {
     /**
      * returns true if there are buffered ops
      */
-    public boolean syncNeeded() {
-        return writtenOffset != lastSyncedOffset; // by default nothing is buffered
-    }
+    public boolean syncNeeded() { return totalOffset != lastSyncedOffset; }

     @Override
     public int totalOperations() {
@@ -193,14 +168,7 @@ public class TranslogWriter extends TranslogReader {

     @Override
     public long sizeInBytes() {
-        return writtenOffset;
-    }
-
-
-    /**
-     * Flushes the buffer if the translog is buffered.
-     */
-    protected void flush() throws IOException {
+        return totalOffset;
     }

     /**
@@ -212,7 +180,7 @@ public class TranslogWriter extends TranslogReader {
         channelReference.incRef();
         boolean success = false;
         try {
-            TranslogReader reader = new InnerReader(this.generation, firstOperationOffset, channelReference);
+            final TranslogReader reader = new InnerReader(this.generation, firstOperationOffset, channelReference);
             success = true;
             return reader;
         } finally {
@@ -227,16 +195,18 @@ public class TranslogWriter extends TranslogReader {
      */
     public ImmutableTranslogReader immutableReader() throws TranslogException {
         if (channelReference.tryIncRef()) {
-            try (ReleasableLock lock = writeLock.acquire()) {
-                ensureOpen();
-                flush();
-                ImmutableTranslogReader reader = new ImmutableTranslogReader(this.generation, channelReference, firstOperationOffset, writtenOffset, operationCounter);
-                channelReference.incRef(); // for new reader
-                return reader;
-            } catch (Exception e) {
-                throw new TranslogException(shardId, "exception while creating an immutable reader", e);
-            } finally {
-                channelReference.decRef();
+            synchronized (this) {
+                try {
+                    ensureOpen();
+                    outputStream.flush();
+                    ImmutableTranslogReader reader = new ImmutableTranslogReader(this.generation, channelReference, firstOperationOffset, getWrittenOffset(), operationCounter);
+                    channelReference.incRef(); // for new reader
+                    return reader;
+                } catch (Exception e) {
+                    throw new TranslogException(shardId, "exception while creating an immutable reader", e);
+                } finally {
+                    channelReference.decRef();
+                }
             }
         } else {
             throw new TranslogException(shardId, "can't increment channel [" + channelReference + "] ref count");
@@ -249,6 +219,10 @@ public class TranslogWriter extends TranslogReader {
         return new BytesArray(buffer.array()).equals(expectedBytes);
     }

+    private long getWrittenOffset() throws IOException {
+        return channelReference.getChannel().position();
+    }
+
     /**
      * this class is used when one wants a reference to this file which exposes all recently written operation.
      * as such it needs access to the internals of the current reader
@@ -289,13 +263,24 @@ public class TranslogWriter extends TranslogReader {
         }

         @Override
-        protected void readBytes(ByteBuffer buffer, long position) throws IOException {
-            try (ReleasableLock lock = readLock.acquire()) {
-                Channels.readFromFileChannelWithEofException(channel, position, buffer);
+        protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException {
+            if (position+targetBuffer.remaining() > getWrittenOffset()) {
+                synchronized (this) {
+                    // we only flush here if it's really really needed - try to minimize the impact of the read operation
+                    // in some cases ie. a tragic event we might still be able to read the relevant value
+                    // which is not really important in production but some test can make most strict assumptions
+                    // if we don't fail in this call unless absolutely necessary.
+                    if (position+targetBuffer.remaining() > getWrittenOffset()) {
+                        outputStream.flush();
+                    }
+                }
             }
+            // we don't have to have a lock here because we only write ahead to the file, so all writes have been completed
+            // for the requested location.
+            Channels.readFromFileChannelWithEofException(channel, position, targetBuffer);
         }

-    protected synchronized void checkpoint(long lastSyncPosition, int operationCounter, ChannelReference channelReference) throws IOException {
+    private synchronized void checkpoint(long lastSyncPosition, int operationCounter, ChannelReference channelReference) throws IOException {
         channelReference.getChannel().force(false);
         writeCheckpoint(lastSyncPosition, operationCounter, channelReference.getPath().getParent(), channelReference.getGeneration(), StandardOpenOption.WRITE);
     }
@@ -321,4 +306,32 @@ public class TranslogWriter extends TranslogReader {
             throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed", tragedy);
         }
     }
+
+
+    private final class BufferedChannelOutputStream extends BufferedOutputStream {
+
+        public BufferedChannelOutputStream(OutputStream out, int size) throws IOException {
+            super(out, size);
+        }
+
+        @Override
+        public synchronized void flush() throws IOException {
+            if (count > 0) {
+                try {
+                    ensureOpen();
+                    super.flush();
+                } catch (Throwable ex) {
+                    closeWithTragicEvent(ex);
+                    throw ex;
+                }
+            }
+        }
+
+        @Override
+        public void close() throws IOException {
+            // the stream is intentionally not closed because
+            // closing it will close the FileChannel
+            throw new IllegalStateException("never close this stream");
+        }
+    }
 }
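The whole TranslogWriter change follows one pattern: the first I/O failure while adding, flushing, or checkpointing marks the writer as tragically closed, and every later operation fails fast with the original failure attached as the cause. A minimal self-contained sketch of that pattern (class and method names are illustrative, not the actual Elasticsearch types):

import java.io.IOException;

// Sketch only: records the first fatal failure ("tragedy") and poisons the writer.
final class TragicWriterSketch {

    private volatile Throwable tragedy; // first fatal failure, kept as the cause
    private volatile boolean closed;

    public synchronized void add(byte[] data) throws IOException {
        ensureOpen();
        try {
            write(data); // any failure here poisons the writer
        } catch (Throwable ex) {
            closeWithTragicEvent(ex);
            throw ex;
        }
    }

    private void ensureOpen() {
        if (closed) {
            // later callers see the original failure as the cause
            throw new IllegalStateException("writer is already closed", tragedy);
        }
    }

    private void closeWithTragicEvent(Throwable ex) {
        tragedy = ex;
        closed = true;
    }

    private void write(byte[] data) throws IOException {
        // stand-in for the buffered channel write in the real writer
    }
}

The BufferedChannelOutputStream above applies the same idea inside flush(), which is why its close() throws rather than closing the underlying FileChannel behind the writer's back.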
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java
index 6878002c015..ebeca4e235b 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java
@@ -22,18 +22,85 @@ package org.elasticsearch.indices;
 import org.elasticsearch.action.update.UpdateHelper;
 import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
 import org.elasticsearch.common.geo.ShapesAvailability;
+import org.elasticsearch.common.geo.builders.ShapeBuilderRegistry;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.util.ExtensionPoint;
 import org.elasticsearch.index.NodeServicesProvider;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.index.mapper.core.*;
+import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
+import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
+import org.elasticsearch.index.mapper.core.ByteFieldMapper;
+import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
+import org.elasticsearch.index.mapper.core.DateFieldMapper;
+import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
+import org.elasticsearch.index.mapper.core.FloatFieldMapper;
+import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
+import org.elasticsearch.index.mapper.core.LongFieldMapper;
+import org.elasticsearch.index.mapper.core.ShortFieldMapper;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
-import org.elasticsearch.index.mapper.internal.*;
+import org.elasticsearch.index.mapper.internal.AllFieldMapper;
+import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
+import org.elasticsearch.index.mapper.internal.IdFieldMapper;
+import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
+import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
+import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
+import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
+import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
+import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
+import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
+import org.elasticsearch.index.mapper.internal.UidFieldMapper;
+import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
 import org.elasticsearch.index.mapper.ip.IpFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
-import org.elasticsearch.index.query.*;
+import org.elasticsearch.index.query.BoolQueryParser;
+import org.elasticsearch.index.query.BoostingQueryParser;
+import org.elasticsearch.index.query.CommonTermsQueryParser;
+import org.elasticsearch.index.query.ConstantScoreQueryParser;
+import org.elasticsearch.index.query.DisMaxQueryParser;
+import org.elasticsearch.index.query.ExistsQueryParser;
+import org.elasticsearch.index.query.FieldMaskingSpanQueryParser;
+import org.elasticsearch.index.query.FuzzyQueryParser;
+import org.elasticsearch.index.query.GeoBoundingBoxQueryParser;
+import org.elasticsearch.index.query.GeoDistanceQueryParser;
+import org.elasticsearch.index.query.GeoDistanceRangeQueryParser;
+import org.elasticsearch.index.query.GeoPolygonQueryParser;
+import org.elasticsearch.index.query.GeoShapeQueryParser;
+import org.elasticsearch.index.query.GeohashCellQuery;
+import org.elasticsearch.index.query.HasChildQueryParser;
+import org.elasticsearch.index.query.HasParentQueryParser;
+import org.elasticsearch.index.query.IdsQueryParser;
+import org.elasticsearch.index.query.IndicesQueryParser;
+import org.elasticsearch.index.query.MatchAllQueryParser;
+import org.elasticsearch.index.query.MatchNoneQueryParser;
+import org.elasticsearch.index.query.MatchQueryParser;
+import org.elasticsearch.index.query.MoreLikeThisQueryParser;
+import org.elasticsearch.index.query.MultiMatchQueryParser;
+import org.elasticsearch.index.query.NestedQueryParser;
+import org.elasticsearch.index.query.PrefixQueryParser;
+import org.elasticsearch.index.query.QueryParser;
+import org.elasticsearch.index.query.QueryStringQueryParser;
+import org.elasticsearch.index.query.RangeQueryParser;
+import org.elasticsearch.index.query.RegexpQueryParser;
+import org.elasticsearch.index.query.ScriptQueryParser;
+import org.elasticsearch.index.query.SimpleQueryStringParser;
+import org.elasticsearch.index.query.SpanContainingQueryParser;
+import org.elasticsearch.index.query.SpanFirstQueryParser;
+import org.elasticsearch.index.query.SpanMultiTermQueryParser;
+import org.elasticsearch.index.query.SpanNearQueryParser;
+import org.elasticsearch.index.query.SpanNotQueryParser;
+import org.elasticsearch.index.query.SpanOrQueryParser;
+import org.elasticsearch.index.query.SpanTermQueryParser;
+import org.elasticsearch.index.query.SpanWithinQueryParser;
+import org.elasticsearch.index.query.TemplateQueryParser;
+import org.elasticsearch.index.query.TermQueryParser;
+import org.elasticsearch.index.query.TermsQueryParser;
+import org.elasticsearch.index.query.TypeQueryParser;
+import org.elasticsearch.index.query.WildcardQueryParser;
+import org.elasticsearch.index.query.WrapperQueryParser;
 import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser;
 import org.elasticsearch.index.termvectors.TermVectorsService;
 import org.elasticsearch.indices.cache.query.IndicesQueryCache;
@@ -122,7 +189,7 @@ public class IndicesModule extends AbstractModule {
         registerQueryParser(ExistsQueryParser.class);
         registerQueryParser(MatchNoneQueryParser.class);

-        if (ShapesAvailability.JTS_AVAILABLE) {
+        if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
             registerQueryParser(GeoShapeQueryParser.class);
         }
     }
@@ -142,11 +209,10 @@ public class IndicesModule extends AbstractModule {
         registerMapper(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser());
         registerMapper(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser());
         registerMapper(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser());
-        registerMapper(TypeParsers.MULTI_FIELD_CONTENT_TYPE, TypeParsers.multiFieldConverterTypeParser);
         registerMapper(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser());
         registerMapper(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser());

-        if (ShapesAvailability.JTS_AVAILABLE) {
+        if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
             registerMapper(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
         }
     }
@@ -218,6 +284,7 @@ public class IndicesModule extends AbstractModule {
         bind(IndicesFieldDataCacheListener.class).asEagerSingleton();
         bind(TermVectorsService.class).asEagerSingleton();
         bind(NodeServicesProvider.class).asEagerSingleton();
+        bind(ShapeBuilderRegistry.class).asEagerSingleton();
     }

     // public for testing
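Both geo_shape registration sites above now require ShapesAvailability.SPATIAL4J_AVAILABLE in addition to JTS_AVAILABLE, since the query parser and the mapper need both optional jars on the classpath. A hedged sketch of how such an availability flag can be computed; the probed class names below are assumptions for illustration, not taken from the diff:

// Illustrative availability probe for optional dependencies.
final class ShapesAvailabilitySketch {

    // assumed class names; the real flags are computed inside ShapesAvailability
    static final boolean JTS_AVAILABLE = hasClass("com.vividsolutions.jts.geom.Geometry");
    static final boolean SPATIAL4J_AVAILABLE = hasClass("com.spatial4j.core.shape.Shape");

    private static boolean hasClass(String name) {
        try {
            Class.forName(name);
            return true;
        } catch (ClassNotFoundException e) {
            return false;
        }
    }

    static void registerGeoShapeSupport() {
        // mirror of the guard in the diff: both optional jars must be present
        if (JTS_AVAILABLE && SPATIAL4J_AVAILABLE) {
            // registerQueryParser(GeoShapeQueryParser.class);
            // registerMapper(GeoShapeFieldMapper.CONTENT_TYPE, ...);
        }
    }
}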
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
index dead72aee8b..36ed70ae65a 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -36,13 +36,19 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.env.ShardLock;
 import org.elasticsearch.gateway.MetaDataStateFormat;
-import org.elasticsearch.index.*;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexModule;
+import org.elasticsearch.index.IndexNotFoundException;
+import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.NodeServicesProvider;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
 import org.elasticsearch.index.flush.FlushStats;
 import org.elasticsearch.index.get.GetStats;
@@ -58,12 +64,18 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.store.IndexStoreConfig;
 import org.elasticsearch.indices.mapper.MapperRegistry;
 import org.elasticsearch.indices.query.IndicesQueriesRegistry;
-import org.elasticsearch.node.settings.NodeSettingsService;
 import org.elasticsearch.plugins.PluginsService;

 import java.io.IOException;
 import java.nio.file.Files;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -100,9 +112,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i

     @Inject
     public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv,
-                          NodeSettingsService nodeSettingsService, AnalysisRegistry analysisRegistry,
-                          IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver,
-                          ClusterService clusterService, MapperRegistry mapperRegistry) {
+                          ClusterSettings clusterSettings, AnalysisRegistry analysisRegistry,
+                          IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver,
+                          ClusterService clusterService, MapperRegistry mapperRegistry) {
         super(settings);
         this.pluginsService = pluginsService;
         this.nodeEnv = nodeEnv;
@@ -113,7 +125,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
         this.clusterService = clusterService;
         this.indexNameExpressionResolver = indexNameExpressionResolver;
         this.mapperRegistry = mapperRegistry;
-        nodeSettingsService.addListener(indexStoreConfig);
+        clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, indexStoreConfig::setRateLimitingType);
+        clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, indexStoreConfig::setRateLimitingThrottle);
+
     }

     @Override
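The IndicesService change is representative of the wider NodeSettingsService-to-ClusterSettings migration in this patch: instead of registering a listener object that re-parses the whole Settings on every refresh, each dynamic setting gets its own typed update consumer, supplied here as method references on IndexStoreConfig. A minimal sketch of that registration model, simplified to String values (the real ClusterSettings API is typed per Setting):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

// Sketch only: per-key consumers replace a single onRefreshSettings(Settings)
// listener that had to diff every key itself.
final class ClusterSettingsSketch {

    private final Map<String, Consumer<String>> consumers = new HashMap<>();

    void addSettingsUpdateConsumer(String key, Consumer<String> consumer) {
        consumers.put(key, consumer);
    }

    void applyUpdate(Map<String, String> changed) {
        changed.forEach((key, value) -> {
            Consumer<String> consumer = consumers.get(key);
            if (consumer != null) {
                consumer.accept(value); // only registered keys get callbacks
            }
        });
    }
}

The design pushes the "did this key change?" bookkeeping into one place and lets components subscribe with a plain method reference, which is exactly the shape of the two addSettingsUpdateConsumer calls above.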
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java
index 30ec403942f..8c3ba6dc47d 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java
@@ -19,7 +19,6 @@

 package org.elasticsearch.indices;

-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java
index 350678da117..7b7fca4b37b 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java
@@ -27,10 +27,16 @@ import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.*;
+import org.elasticsearch.index.analysis.AnalysisRegistry;
+import org.elasticsearch.index.analysis.AnalyzerProvider;
+import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenizerFactory;

 import java.io.IOException;
-import java.util.*;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;

 /**
  * The AnalysisModule is the main extension point for node and index level analysis components. The lucene classes
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
index 1ae41560067..3e63b6fba65 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
@@ -31,7 +31,11 @@ import java.io.InputStream;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Function;

diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java
index cb07779164e..36795c66da4 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java
@@ -62,8 +62,8 @@ import org.apache.lucene.analysis.util.CharArraySet;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.index.analysis.PatternAnalyzer;
-import org.elasticsearch.index.analysis.StandardHtmlStripAnalyzer;
 import org.elasticsearch.index.analysis.SnowballAnalyzer;
+import org.elasticsearch.index.analysis.StandardHtmlStripAnalyzer;
 import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy;

 import java.util.Locale;
@@ -83,7 +83,7 @@ public enum PreBuiltAnalyzers {
                 a = new StandardAnalyzer();
             }
             a.setVersion(version.luceneVersion);
-            return a;
+            return a;
         }
     },
@@ -432,7 +432,7 @@ public enum PreBuiltAnalyzers {
             return a;
         }
     },
-
+
     SORANI {
         @Override
         protected Analyzer create(Version version) {
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java
index 70d1a25b43e..631d8bac76d 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java
@@ -41,7 +41,19 @@ import org.apache.lucene.analysis.fa.PersianNormalizationFilter;
 import org.apache.lucene.analysis.fr.FrenchAnalyzer;
 import org.apache.lucene.analysis.hi.HindiNormalizationFilter;
 import org.apache.lucene.analysis.in.IndicNormalizationFilter;
-import org.apache.lucene.analysis.miscellaneous.*;
+import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter;
+import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter;
+import org.apache.lucene.analysis.miscellaneous.LengthFilter;
+import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilter;
+import org.apache.lucene.analysis.miscellaneous.Lucene43LengthFilter;
+import org.apache.lucene.analysis.miscellaneous.Lucene43TrimFilter;
+import org.apache.lucene.analysis.miscellaneous.Lucene47WordDelimiterFilter;
+import org.apache.lucene.analysis.miscellaneous.ScandinavianFoldingFilter;
+import org.apache.lucene.analysis.miscellaneous.ScandinavianNormalizationFilter;
+import org.apache.lucene.analysis.miscellaneous.TrimFilter;
+import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter;
+import org.apache.lucene.analysis.miscellaneous.UniqueTokenFilter;
+import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
 import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
 import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter;
 import org.apache.lucene.analysis.ngram.Lucene43NGramTokenFilter;
@@ -57,11 +69,12 @@ import org.apache.lucene.analysis.tr.ApostropheFilter;
 import org.apache.lucene.analysis.util.CharArraySet;
 import org.apache.lucene.analysis.util.ElisionFilter;
 import org.elasticsearch.Version;
-import org.elasticsearch.index.analysis.*;
+import org.elasticsearch.index.analysis.DelimitedPayloadTokenFilterFactory;
 import org.elasticsearch.index.analysis.LimitTokenCountFilterFactory;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy;
-import org.tartarus.snowball.ext.FrenchStemmer;
 import org.tartarus.snowball.ext.DutchStemmer;
+import org.tartarus.snowball.ext.FrenchStemmer;

 import java.util.Locale;

@@ -396,7 +409,7 @@ public enum PreBuiltTokenFilters {
             return new CJKWidthFilter(tokenStream);
         }
     },
-
+
     DECIMAL_DIGIT(CachingStrategy.ONE) {
         @Override
         public TokenStream create(TokenStream tokenStream, Version version) {
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java
index 88af1fbb5b5..7b1563388e0 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java
@@ -24,9 +24,9 @@ import org.apache.lucene.analysis.core.LetterTokenizer;
 import org.apache.lucene.analysis.core.LowerCaseTokenizer;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer;
-import org.apache.lucene.analysis.ngram.NGramTokenizer;
 import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenizer;
 import org.apache.lucene.analysis.ngram.Lucene43NGramTokenizer;
+import org.apache.lucene.analysis.ngram.NGramTokenizer;
 import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
 import org.apache.lucene.analysis.pattern.PatternTokenizer;
 import org.apache.lucene.analysis.standard.ClassicTokenizer;
@@ -116,7 +116,7 @@ public enum PreBuiltTokenizers {
         protected Tokenizer create(Version version) {
             // see NGramTokenizerFactory for an explanation of this logic:
             // 4.4 patch was used before 4.4 was released
-            if (version.onOrAfter(org.elasticsearch.Version.V_0_90_2) &&
+            if (version.onOrAfter(org.elasticsearch.Version.V_0_90_2) &&
                 version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_3)) {
                 return new NGramTokenizer();
             } else {
@@ -130,7 +130,7 @@ public enum PreBuiltTokenizers {
         protected Tokenizer create(Version version) {
             // see EdgeNGramTokenizerFactory for an explanation of this logic:
             // 4.4 patch was used before 4.4 was released
-            if (version.onOrAfter(org.elasticsearch.Version.V_0_90_2) &&
+            if (version.onOrAfter(org.elasticsearch.Version.V_0_90_2) &&
                 version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_3)) {
                 return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
             } else {
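The PreBuiltTokenizers hunks only strip trailing whitespace, but the double version gate they touch is worth a gloss: the modern NGram/EdgeNGram tokenizers are used only when both the Elasticsearch version is at least V_0_90_2 and the bundled Lucene version is at least 4.3, because the 4.4 patch was used before 4.4 was released. A self-contained analogue of that gate, where the Release enum and the boolean are stand-ins for the real Version classes:

// Sketch only: Release stands in for org.elasticsearch.Version and the
// boolean for the Lucene version check; the Strings are placeholders for
// the real tokenizer classes.
enum Release {
    V_0_90_0, V_0_90_1, V_0_90_2, V_0_90_3;

    boolean onOrAfter(Release other) {
        return ordinal() >= other.ordinal();
    }
}

final class NGramFactorySketch {
    static String tokenizerFor(Release version, boolean lucene43OrLater) {
        // 4.4 patch was used before 4.4 was released, hence both checks
        if (version.onOrAfter(Release.V_0_90_2) && lucene43OrLater) {
            return "NGramTokenizer";         // modern implementation
        } else {
            return "Lucene43NGramTokenizer"; // legacy implementation
        }
    }
}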
diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
index 33f3c127d67..0e1532bc6b3 100644
--- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
+++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
@@ -25,9 +25,10 @@ import org.elasticsearch.common.breaker.CircuitBreakingException;
 import org.elasticsearch.common.breaker.NoopCircuitBreaker;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.node.settings.NodeSettingsService;

 import java.util.ArrayList;
 import java.util.List;
@@ -45,25 +46,17 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {

     private final ConcurrentMap<String, CircuitBreaker> breakers = new ConcurrentHashMap<>();

-    // Old pre-1.4.0 backwards compatible settings
-    public static final String OLD_CIRCUIT_BREAKER_MAX_BYTES_SETTING = "indices.fielddata.breaker.limit";
-    public static final String OLD_CIRCUIT_BREAKER_OVERHEAD_SETTING = "indices.fielddata.breaker.overhead";
+    public static final Setting<ByteSizeValue> TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.total.limit", "70%", true, Setting.Scope.CLUSTER);

-    public static final String TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = "indices.breaker.total.limit";
-    public static final String DEFAULT_TOTAL_CIRCUIT_BREAKER_LIMIT = "70%";
+    public static final Setting<ByteSizeValue> FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, Setting.Scope.CLUSTER);
+    public static final Setting<Double> FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, true, Setting.Scope.CLUSTER);
+    public static final Setting<CircuitBreaker.Type> FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, false, Setting.Scope.CLUSTER);

-    public static final String FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = "indices.breaker.fielddata.limit";
-    public static final String FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = "indices.breaker.fielddata.overhead";
-    public static final String FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.fielddata.type";
-    public static final String DEFAULT_FIELDDATA_BREAKER_LIMIT = "60%";
-    public static final double DEFAULT_FIELDDATA_OVERHEAD_CONSTANT = 1.03;
+    public static final Setting<ByteSizeValue> REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, Setting.Scope.CLUSTER);
+    public static final Setting<Double> REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, true, Setting.Scope.CLUSTER);
+    public static final Setting<CircuitBreaker.Type> REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = new Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, false, Setting.Scope.CLUSTER);

-    public static final String REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = "indices.breaker.request.limit";
-    public static final String REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = "indices.breaker.request.overhead";
-    public static final String REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = "indices.breaker.request.type";
-    public static final String DEFAULT_REQUEST_BREAKER_LIMIT = "40%";
-    public static final String DEFAULT_BREAKER_TYPE = "memory";

     private volatile BreakerSettings parentSettings;
     private volatile BreakerSettings fielddataSettings;
@@ -73,41 +66,21 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {

     private final AtomicLong parentTripCount = new AtomicLong(0);

     @Inject
-    public HierarchyCircuitBreakerService(Settings settings, NodeSettingsService nodeSettingsService) {
+    public HierarchyCircuitBreakerService(Settings settings, ClusterSettings clusterSettings) {
         super(settings);
-
-        // This uses the old InternalCircuitBreakerService.CIRCUIT_BREAKER_MAX_BYTES_SETTING
-        // setting to keep backwards compatibility with 1.3, it can be safely
-        // removed when compatibility with 1.3 is no longer needed
-        String compatibilityFielddataLimitDefault = DEFAULT_FIELDDATA_BREAKER_LIMIT;
-        ByteSizeValue compatibilityFielddataLimit = settings.getAsMemory(OLD_CIRCUIT_BREAKER_MAX_BYTES_SETTING, null);
-        if (compatibilityFielddataLimit != null) {
-            compatibilityFielddataLimitDefault = compatibilityFielddataLimit.toString();
-        }
-
-        // This uses the old InternalCircuitBreakerService.CIRCUIT_BREAKER_OVERHEAD_SETTING
-        // setting to keep backwards compatibility with 1.3, it can be safely
-        // removed when compatibility with 1.3 is no longer needed
-        double compatibilityFielddataOverheadDefault = DEFAULT_FIELDDATA_OVERHEAD_CONSTANT;
-        Double compatibilityFielddataOverhead = settings.getAsDouble(OLD_CIRCUIT_BREAKER_OVERHEAD_SETTING, null);
-        if (compatibilityFielddataOverhead != null) {
-            compatibilityFielddataOverheadDefault = compatibilityFielddataOverhead;
-        }
-
         this.fielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA,
-                settings.getAsMemory(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, compatibilityFielddataLimitDefault).bytes(),
-                settings.getAsDouble(FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, compatibilityFielddataOverheadDefault),
-                CircuitBreaker.Type.parseValue(settings.get(FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, DEFAULT_BREAKER_TYPE))
+                FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(),
+                FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings),
+                FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.get(settings)
         );

         this.requestSettings = new BreakerSettings(CircuitBreaker.REQUEST,
-                settings.getAsMemory(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, DEFAULT_REQUEST_BREAKER_LIMIT).bytes(),
-                settings.getAsDouble(REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.0),
-                CircuitBreaker.Type.parseValue(settings.get(REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, DEFAULT_BREAKER_TYPE))
+                REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(),
+                REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings),
+                REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.get(settings)
         );

-        this.parentSettings = new BreakerSettings(CircuitBreaker.PARENT,
-                settings.getAsMemory(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, DEFAULT_TOTAL_CIRCUIT_BREAKER_LIMIT).bytes(), 1.0, CircuitBreaker.Type.PARENT);
+        this.parentSettings = new BreakerSettings(CircuitBreaker.PARENT, TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(), 1.0, CircuitBreaker.Type.PARENT);
         if (logger.isTraceEnabled()) {
             logger.trace("parent circuit breaker with settings {}", this.parentSettings);
         }
@@ -115,52 +88,38 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
         registerBreaker(this.requestSettings);
         registerBreaker(this.fielddataSettings);

-        nodeSettingsService.addListener(new ApplySettings());
+        clusterSettings.addSettingsUpdateConsumer(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, this::setTotalCircuitBreakerLimit, this::validateTotalCircuitBreakerLimit);
+        clusterSettings.addSettingsUpdateConsumer(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setFieldDataBreakerLimit);
+        clusterSettings.addSettingsUpdateConsumer(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setRequestBreakerLimit);
+    }

+    private void setRequestBreakerLimit(ByteSizeValue newRequestMax, Double newRequestOverhead) {
+        BreakerSettings newRequestSettings = new BreakerSettings(CircuitBreaker.REQUEST, newRequestMax.bytes(), newRequestOverhead,
+                HierarchyCircuitBreakerService.this.requestSettings.getType());
+        registerBreaker(newRequestSettings);
+        HierarchyCircuitBreakerService.this.requestSettings = newRequestSettings;
+        logger.info("Updated breaker settings request: {}", newRequestSettings);
     }

-    public class ApplySettings implements NodeSettingsService.Listener {
+    private void setFieldDataBreakerLimit(ByteSizeValue newFielddataMax, Double newFielddataOverhead) {
+        long newFielddataLimitBytes = newFielddataMax == null ? HierarchyCircuitBreakerService.this.fielddataSettings.getLimit() : newFielddataMax.bytes();
+        newFielddataOverhead = newFielddataOverhead == null ? HierarchyCircuitBreakerService.this.fielddataSettings.getOverhead() : newFielddataOverhead;
+        BreakerSettings newFielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA, newFielddataLimitBytes, newFielddataOverhead,
+                HierarchyCircuitBreakerService.this.fielddataSettings.getType());
+        registerBreaker(newFielddataSettings);
+        HierarchyCircuitBreakerService.this.fielddataSettings = newFielddataSettings;
+        logger.info("Updated breaker settings field data: {}", newFielddataSettings);

-        @Override
-        public void onRefreshSettings(Settings settings) {
+    }

-            // Fielddata settings
-            ByteSizeValue newFielddataMax = settings.getAsMemory(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, null);
-            Double newFielddataOverhead = settings.getAsDouble(FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, null);
-            if (newFielddataMax != null || newFielddataOverhead != null) {
-                long newFielddataLimitBytes = newFielddataMax == null ? HierarchyCircuitBreakerService.this.fielddataSettings.getLimit() : newFielddataMax.bytes();
-                newFielddataOverhead = newFielddataOverhead == null ? HierarchyCircuitBreakerService.this.fielddataSettings.getOverhead() : newFielddataOverhead;
+    private boolean validateTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) {
+        BreakerSettings newParentSettings = new BreakerSettings(CircuitBreaker.PARENT, byteSizeValue.bytes(), 1.0, CircuitBreaker.Type.PARENT);
+        validateSettings(new BreakerSettings[]{newParentSettings});
+        return true;
+    }

-                BreakerSettings newFielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA, newFielddataLimitBytes, newFielddataOverhead,
-                        HierarchyCircuitBreakerService.this.fielddataSettings.getType());
-                registerBreaker(newFielddataSettings);
-                HierarchyCircuitBreakerService.this.fielddataSettings = newFielddataSettings;
-                logger.info("Updated breaker settings fielddata: {}", newFielddataSettings);
-            }
-
-            // Request settings
-            ByteSizeValue newRequestMax = settings.getAsMemory(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, null);
-            Double newRequestOverhead = settings.getAsDouble(REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, null);
-            if (newRequestMax != null || newRequestOverhead != null) {
-                long newRequestLimitBytes = newRequestMax == null ? HierarchyCircuitBreakerService.this.requestSettings.getLimit() : newRequestMax.bytes();
-                newRequestOverhead = newRequestOverhead == null ? HierarchyCircuitBreakerService.this.requestSettings.getOverhead() : newRequestOverhead;
-
-                BreakerSettings newRequestSettings = new BreakerSettings(CircuitBreaker.REQUEST, newRequestLimitBytes, newRequestOverhead,
-                        HierarchyCircuitBreakerService.this.requestSettings.getType());
-                registerBreaker(newRequestSettings);
-                HierarchyCircuitBreakerService.this.requestSettings = newRequestSettings;
-                logger.info("Updated breaker settings request: {}", newRequestSettings);
-            }
-
-            // Parent settings
-            long oldParentMax = HierarchyCircuitBreakerService.this.parentSettings.getLimit();
-            ByteSizeValue newParentMax = settings.getAsMemory(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, null);
-            if (newParentMax != null && (newParentMax.bytes() != oldParentMax)) {
-                BreakerSettings newParentSettings = new BreakerSettings(CircuitBreaker.PARENT, newParentMax.bytes(), 1.0, CircuitBreaker.Type.PARENT);
-                validateSettings(new BreakerSettings[]{newParentSettings});
-                HierarchyCircuitBreakerService.this.parentSettings = newParentSettings;
-                logger.info("Updated breaker settings parent: {}", newParentSettings);
-            }
-        }
+    private void setTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) {
+        BreakerSettings newParentSettings = new BreakerSettings(CircuitBreaker.PARENT, byteSizeValue.bytes(), 1.0, CircuitBreaker.Type.PARENT);
+        this.parentSettings = newParentSettings;
     }

     /**
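The HierarchyCircuitBreakerService rewrite replaces loose String constants plus getAsMemory/getAsDouble lookups with typed Setting objects that carry key, default, parser, dynamic flag, and scope in one declaration, so call sites collapse to SETTING.get(settings). An illustrative miniature of that idea (simplified; the real Setting class also models the dynamic flag and Scope seen in the diff):

import java.util.Collections;
import java.util.Map;
import java.util.function.Function;

// Sketch only: key, default and parser live together in one object.
final class SettingSketch<T> {

    private final String key;
    private final String defaultValue;
    private final Function<String, T> parser;

    SettingSketch(String key, String defaultValue, Function<String, T> parser) {
        this.key = key;
        this.defaultValue = defaultValue;
        this.parser = parser;
    }

    T get(Map<String, String> settings) {
        String raw = settings.containsKey(key) ? settings.get(key) : defaultValue;
        return parser.apply(raw);
    }

    public static void main(String[] args) {
        SettingSketch<Double> overhead =
                new SettingSketch<>("indices.breaker.fielddata.overhead", "1.03", Double::parseDouble);
        System.out.println(overhead.get(Collections.emptyMap()));                       // 1.03 (default)
        System.out.println(overhead.get(Collections.singletonMap(
                "indices.breaker.fielddata.overhead", "2.0")));                          // 2.0
    }
}

Pairing each Setting with an update consumer (and, for the parent limit, a validator) is what lets the old monolithic ApplySettings listener be deleted wholesale above.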
diff --git a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java
index 6628252d8eb..a00cc7e787c 100644
--- a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java
@@ -29,7 +29,11 @@ import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.cache.*;
+import org.elasticsearch.common.cache.Cache;
+import org.elasticsearch.common.cache.CacheBuilder;
+import org.elasticsearch.common.cache.CacheLoader;
+import org.elasticsearch.common.cache.RemovalListener;
+import org.elasticsearch.common.cache.RemovalNotification;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -47,7 +51,11 @@ import org.elasticsearch.search.query.QueryPhase;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.threadpool.ThreadPool;

-import java.util.*;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Iterator;
+import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.TimeUnit;

@@ -71,13 +79,9 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis
      * since we are checking on the cluster state IndexMetaData always.
      */
     public static final String INDEX_CACHE_REQUEST_ENABLED = "index.requests.cache.enable";
-    @Deprecated
-    public static final String DEPRECATED_INDEX_CACHE_REQUEST_ENABLED = "index.cache.query.enable";
     public static final String INDICES_CACHE_REQUEST_CLEAN_INTERVAL = "indices.requests.cache.clean_interval";
     public static final String INDICES_CACHE_QUERY_SIZE = "indices.requests.cache.size";
-    @Deprecated
-    public static final String DEPRECATED_INDICES_CACHE_QUERY_SIZE = "indices.cache.query.size";
     public static final String INDICES_CACHE_QUERY_EXPIRE = "indices.requests.cache.expire";

     private static final Set<SearchType> CACHEABLE_SEARCH_TYPES = EnumSet.of(SearchType.QUERY_THEN_FETCH, SearchType.QUERY_AND_FETCH);
@@ -105,19 +109,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis
         this.threadPool = threadPool;
         this.cleanInterval = settings.getAsTime(INDICES_CACHE_REQUEST_CLEAN_INTERVAL, TimeValue.timeValueSeconds(60));

-        String size = settings.get(INDICES_CACHE_QUERY_SIZE);
-        if (size == null) {
-            size = settings.get(DEPRECATED_INDICES_CACHE_QUERY_SIZE);
-            if (size != null) {
-                deprecationLogger.deprecated("The [" + DEPRECATED_INDICES_CACHE_QUERY_SIZE
-                        + "] settings is now deprecated, use [" + INDICES_CACHE_QUERY_SIZE + "] instead");
-            }
-        }
-        if (size == null) {
-            // this cache can be very small yet still be very effective
-            size = "1%";
-        }
-        this.size = size;
+        this.size = settings.get(INDICES_CACHE_QUERY_SIZE, "1%");

         this.expire = settings.getAsTime(INDICES_CACHE_QUERY_EXPIRE, null);
         buildCache();
@@ -127,18 +119,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis
     }

     private boolean isCacheEnabled(Settings settings, boolean defaultEnable) {
-        Boolean enable = settings.getAsBoolean(INDEX_CACHE_REQUEST_ENABLED, null);
-        if (enable == null) {
-            enable = settings.getAsBoolean(DEPRECATED_INDEX_CACHE_REQUEST_ENABLED, null);
-            if (enable != null) {
-                deprecationLogger.deprecated("The [" + DEPRECATED_INDEX_CACHE_REQUEST_ENABLED
-                        + "] settings is now deprecated, use [" + INDEX_CACHE_REQUEST_ENABLED + "] instead");
-            }
-        }
-        if (enable == null) {
-            enable = defaultEnable;
-        }
-        return enable;
+        return settings.getAsBoolean(INDEX_CACHE_REQUEST_ENABLED, defaultEnable);
     }

     private void buildCache() {
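IndicesRequestCache drops the deprecated index.cache.query.* fallback keys entirely: reading a setting collapses from a null-chasing fallback with a deprecation warning to a one-line lookup with a default. A before/after sketch under assumed names:

import java.util.Map;

// Sketch only: System.err stands in for the deprecation logger.
final class CacheSizeSettingSketch {

    // old shape: prefer the new key, fall back to the deprecated one and warn
    static String sizeWithFallback(Map<String, String> settings) {
        String size = settings.get("indices.requests.cache.size");
        if (size == null) {
            size = settings.get("indices.cache.query.size"); // deprecated key
            if (size != null) {
                System.err.println("deprecated setting, use indices.requests.cache.size");
            }
        }
        return size != null ? size : "1%";
    }

    // new shape: the deprecated key is gone, a plain default remains
    static String size(Map<String, String> settings) {
        return settings.getOrDefault("indices.requests.cache.size", "1%");
    }
}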
diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
index 3728628f755..5d26297073c 100644
--- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
+++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
@@ -33,7 +33,12 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.RestoreSource;
+import org.elasticsearch.cluster.routing.RoutingNodes;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
@@ -50,7 +55,11 @@ import org.elasticsearch.index.IndexShardAlreadyExistsException;
 import org.elasticsearch.index.NodeServicesProvider;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.shard.*;
+import org.elasticsearch.index.shard.IndexEventListener;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.IndexShardState;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardNotFoundException;
 import org.elasticsearch.index.snapshots.IndexShardRepository;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.indices.flush.SyncedFlushService;
@@ -64,7 +73,11 @@ import org.elasticsearch.search.SearchService;
 import org.elasticsearch.snapshots.RestoreService;
 import org.elasticsearch.threadpool.ThreadPool;

-import java.util.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.ConcurrentMap;

 /**
@@ -446,7 +459,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent {
                 try {
                     if (indexShard.recoverFromStore(nodes.localNode())) {
-                        shardStateAction.shardStarted(shardRouting, indexMetaData.getIndexUUID(), "after recovery from store");
+                        shardStateAction.shardStarted(state, shardRouting, indexMetaData.getIndexUUID(), "after recovery from store");
                     }
                 } catch (Throwable t) {
                     handleRecoveryFailure(indexService, shardRouting, true, t);
@@ -653,7 +665,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent
diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
--- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
     public Cache<Key, Accountable> getCache() {
@@ -107,7 +106,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
         final Accountable value = notification.getValue();
         for (IndexFieldDataCache.Listener listener : key.listeners) {
             try {
-                listener.onRemoval(key.shardId, indexCache.fieldNames, indexCache.fieldDataType, notification.getRemovalReason() == RemovalNotification.RemovalReason.EVICTED, value.ramBytesUsed());
+                listener.onRemoval(key.shardId, indexCache.fieldName, indexCache.fieldDataType, notification.getRemovalReason() == RemovalNotification.RemovalReason.EVICTED, value.ramBytesUsed());
             } catch (Throwable e) {
                 // load anyway since listeners should not throw exceptions
                 logger.error("Failed to call listener on field data cache unloading", e);
@@ -129,16 +128,16 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
     static class IndexFieldCache implements IndexFieldDataCache, SegmentReader.CoreClosedListener, IndexReader.ReaderClosedListener {
         private final ESLogger logger;
         final Index index;
-        final MappedFieldType.Names fieldNames;
+        final String fieldName;
         final FieldDataType fieldDataType;
         private final Cache<Key, Accountable> cache;
         private final Listener[] listeners;

-        IndexFieldCache(ESLogger logger,final Cache<Key, Accountable> cache, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Listener... listeners) {
+        IndexFieldCache(ESLogger logger,final Cache<Key, Accountable> cache, Index index, String fieldName, FieldDataType fieldDataType, Listener... listeners) {
             this.logger = logger;
             this.listeners = listeners;
             this.index = index;
-            this.fieldNames = fieldNames;
+            this.fieldName = fieldName;
             this.fieldDataType = fieldDataType;
             this.cache = cache;
         }
@@ -156,7 +155,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
                     final AtomicFieldData fieldData = indexFieldData.loadDirect(context);
                     for (Listener listener : k.listeners) {
                         try {
-                            listener.onCache(shardId, fieldNames, fieldDataType, fieldData);
+                            listener.onCache(shardId, fieldName, fieldDataType, fieldData);
                         } catch (Throwable e) {
                             // load anyway since listeners should not throw exceptions
                             logger.error("Failed to call listener on atomic field data loading", e);
@@ -180,7 +179,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
                     final Accountable ifd = (Accountable) indexFieldData.localGlobalDirect(indexReader);
                     for (Listener listener : k.listeners) {
                         try {
-                            listener.onCache(shardId, fieldNames, fieldDataType, ifd);
+                            listener.onCache(shardId, fieldName, fieldDataType, ifd);
                         } catch (Throwable e) {
                             // load anyway since listeners should not throw exceptions
                             logger.error("Failed to call listener on global ordinals loading", e);
@@ -218,7 +217,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
     public void clear(String fieldName) {
         for (Key key : cache.keys()) {
             if (key.indexCache.index.equals(index)) {
-                if (key.indexCache.fieldNames.fullName().equals(fieldName)) {
+                if (key.indexCache.fieldName.equals(fieldName)) {
                     cache.invalidate(key);
                 }
             }
diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java
index cfc6357548a..c37cf6def79 100644
--- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java
+++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java
@@ -19,14 +19,11 @@

 package org.elasticsearch.indices.fielddata.cache;

-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;

@@ -46,11 +43,11 @@ public class IndicesFieldDataCacheListener implements IndexFieldDataCache.Listen
     }

     @Override
-    public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable fieldData) {
+    public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable fieldData) {
     }

     @Override
-    public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+    public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
         assert sizeInBytes >= 0 : "When reducing circuit breaker, it should be adjusted with a number higher or equal to 0 and not [" + sizeInBytes + "]";
         circuitBreakerService.getBreaker(CircuitBreaker.FIELDDATA).addWithoutBreaking(-sizeInBytes);
     }
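The fielddata cache diff replaces MappedFieldType.Names with a plain String field name in the cache key and in both listener callbacks, so clearing by field becomes a direct equals() check instead of fieldNames.fullName(). A simplified sketch of the key and the clear loop (types are illustrative; equals/hashCode on Key are omitted for brevity):

import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Sketch only: Object stands in for the cached fielddata value.
final class FieldDataCacheSketch {

    static final class Key {
        final String index;
        final String fieldName;

        Key(String index, String fieldName) {
            this.index = index;
            this.fieldName = fieldName;
        }
    }

    private final Map<Key, Object> cache = new ConcurrentHashMap<>();

    // clearing by field is now a plain String comparison on the key
    void clear(String index, String fieldName) {
        for (Iterator<Key> it = cache.keySet().iterator(); it.hasNext(); ) {
            Key key = it.next();
            if (key.index.equals(index) && key.fieldName.equals(fieldName)) {
                it.remove();
            }
        }
    }
}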
diff --git a/core/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java b/core/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java
index f7ae5f94b96..220ce9120e9 100644
--- a/core/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java
+++ b/core/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java
@@ -19,8 +19,12 @@
 package org.elasticsearch.indices.flush;

 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.index.shard.ShardId;

+import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;

@@ -30,15 +34,15 @@ import static java.util.Collections.unmodifiableMap;
 /**
  * Result for all copies of a shard
  */
-public class ShardsSyncedFlushResult {
+public class ShardsSyncedFlushResult implements Streamable {
     private String failureReason;
-    private Map<ShardRouting, SyncedFlushService.SyncedFlushResponse> shardResponses;
+    private Map<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> shardResponses;
     private String syncId;
    private ShardId shardId;
     // some shards may be unassigned, so we need this as state
     private int totalShards;

-    public ShardsSyncedFlushResult() {
+    private ShardsSyncedFlushResult() {
     }

     public ShardId getShardId() {
@@ -59,7 +63,7 @@ public class ShardsSyncedFlushResult {
     /**
      * success constructor
      */
-    public ShardsSyncedFlushResult(ShardId shardId, String syncId, int totalShards, Map<ShardRouting, SyncedFlushService.SyncedFlushResponse> shardResponses) {
+    public ShardsSyncedFlushResult(ShardId shardId, String syncId, int totalShards, Map<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> shardResponses) {
         this.failureReason = null;
         this.shardResponses = unmodifiableMap(new HashMap<>(shardResponses));
         this.syncId = syncId;
@@ -98,7 +102,7 @@ public class ShardsSyncedFlushResult {
      */
     public int successfulShards() {
         int i = 0;
-        for (SyncedFlushService.SyncedFlushResponse result : shardResponses.values()) {
+        for (SyncedFlushService.ShardSyncedFlushResponse result : shardResponses.values()) {
             if (result.success()) {
                 i++;
             }
@@ -109,9 +113,9 @@ public class ShardsSyncedFlushResult {
     /**
      * @return an array of shard failures
      */
-    public Map<ShardRouting, SyncedFlushService.SyncedFlushResponse> failedShards() {
-        Map<ShardRouting, SyncedFlushService.SyncedFlushResponse> failures = new HashMap<>();
-        for (Map.Entry<ShardRouting, SyncedFlushService.SyncedFlushResponse> result : shardResponses.entrySet()) {
+    public Map<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> failedShards() {
+        Map<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> failures = new HashMap<>();
+        for (Map.Entry<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> result : shardResponses.entrySet()) {
             if (result.getValue().success() == false) {
                 failures.put(result.getKey(), result.getValue());
             }
@@ -123,11 +127,45 @@ public class ShardsSyncedFlushResult {
      * @return Individual responses for each shard copy with a detailed failure message if the copy failed to perform the synced flush.
      * Empty if synced flush failed before step three.
      */
-    public Map<ShardRouting, SyncedFlushService.SyncedFlushResponse> shardResponses() {
+    public Map<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> shardResponses() {
         return shardResponses;
     }

     public ShardId shardId() {
         return shardId;
     }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        failureReason = in.readOptionalString();
+        int numResponses = in.readInt();
+        shardResponses = new HashMap<>();
+        for (int i = 0; i < numResponses; i++) {
+            ShardRouting shardRouting = ShardRouting.readShardRoutingEntry(in);
+            SyncedFlushService.ShardSyncedFlushResponse response = SyncedFlushService.ShardSyncedFlushResponse.readSyncedFlushResponse(in);
+            shardResponses.put(shardRouting, response);
+        }
+        syncId = in.readOptionalString();
+        shardId = ShardId.readShardId(in);
+        totalShards = in.readInt();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeOptionalString(failureReason);
+        out.writeInt(shardResponses.size());
+        for (Map.Entry<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> entry : shardResponses.entrySet()) {
+            entry.getKey().writeTo(out);
+            entry.getValue().writeTo(out);
+        }
+        out.writeOptionalString(syncId);
+        shardId.writeTo(out);
+        out.writeInt(totalShards);
+    }
+
+    public static ShardsSyncedFlushResult readShardsSyncedFlushResult(StreamInput in) throws IOException {
+        ShardsSyncedFlushResult shardsSyncedFlushResult = new ShardsSyncedFlushResult();
+        shardsSyncedFlushResult.readFrom(in);
+        return shardsSyncedFlushResult;
+    }
 }
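ShardsSyncedFlushResult becomes Streamable: writeTo emits an optional failure reason, a length-prefixed map of per-shard responses, an optional sync id, the shard id, and the total shard count, and readFrom consumes them in exactly the same order. A sketch of that wire layout using DataOutputStream in place of StreamOutput (the optional-string encoding here is an assumption about what the real API does):

import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Map;

// Sketch only: Strings stand in for ShardRouting keys and response values.
final class SyncedFlushWireSketch {

    static void writeOptionalString(DataOutputStream out, String s) throws IOException {
        out.writeBoolean(s != null); // presence flag, mimicking writeOptionalString
        if (s != null) {
            out.writeUTF(s);
        }
    }

    static void writeResult(DataOutputStream out, String failureReason,
                            Map<String, String> shardResponses,
                            String syncId, int totalShards) throws IOException {
        writeOptionalString(out, failureReason);
        out.writeInt(shardResponses.size());      // length prefix for the map
        for (Map.Entry<String, String> entry : shardResponses.entrySet()) {
            out.writeUTF(entry.getKey());         // routing entry in the real code
            out.writeUTF(entry.getValue());       // per-shard response in the real code
        }
        writeOptionalString(out, syncId);
        out.writeInt(totalShards);
    }
}

Because reader and writer agree on the field order, the private no-arg constructor plus the static readShardsSyncedFlushResult factory are all that is needed to rebuild the object on the receiving node.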
*/ - public void attemptSyncedFlush(final String[] aliasesOrIndices, IndicesOptions indicesOptions, final ActionListener listener) { + public void attemptSyncedFlush(final String[] aliasesOrIndices, IndicesOptions indicesOptions, final ActionListener listener) { final ClusterState state = clusterService.state(); final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, indicesOptions, aliasesOrIndices); final Map> results = ConcurrentCollections.newConcurrentMap(); @@ -123,7 +123,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } if (numberOfShards == 0) { - listener.onResponse(new IndicesSyncedFlushResult(results)); + listener.onResponse(new SyncedFlushResponse(results)); return; } final int finalTotalNumberOfShards = totalNumberOfShards; @@ -138,7 +138,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL public void onResponse(ShardsSyncedFlushResult syncedFlushResult) { results.get(index).add(syncedFlushResult); if (countDown.countDown()) { - listener.onResponse(new IndicesSyncedFlushResult(results)); + listener.onResponse(new SyncedFlushResponse(results)); } } @@ -147,7 +147,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL logger.debug("{} unexpected error while executing synced flush", shardId); results.get(index).add(new ShardsSyncedFlushResult(shardId, finalTotalNumberOfShards, e.getMessage())); if (countDown.countDown()) { - listener.onResponse(new IndicesSyncedFlushResult(results)); + listener.onResponse(new SyncedFlushResponse(results)); } } }); @@ -297,33 +297,33 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL void sendSyncRequests(final String syncId, final List shards, ClusterState state, Map expectedCommitIds, final ShardId shardId, final int totalShards, final ActionListener listener) { final CountDown countDown = new CountDown(shards.size()); - final Map results = ConcurrentCollections.newConcurrentMap(); + final Map results = ConcurrentCollections.newConcurrentMap(); for (final ShardRouting shard : shards) { final DiscoveryNode node = state.nodes().get(shard.currentNodeId()); if (node == null) { logger.trace("{} is assigned to an unknown node. skipping for sync id [{}]. shard routing {}", shardId, syncId, shard); - results.put(shard, new SyncedFlushResponse("unknown node")); + results.put(shard, new ShardSyncedFlushResponse("unknown node")); contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); continue; } final Engine.CommitId expectedCommitId = expectedCommitIds.get(shard.currentNodeId()); if (expectedCommitId == null) { logger.trace("{} can't resolve expected commit id for {}, skipping for sync id [{}]. shard routing {}", shardId, syncId, shard); - results.put(shard, new SyncedFlushResponse("no commit id from pre-sync flush")); + results.put(shard, new ShardSyncedFlushResponse("no commit id from pre-sync flush")); contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); continue; } logger.trace("{} sending synced flush request to {}. 
sync id [{}].", shardId, shard, syncId); - transportService.sendRequest(node, SYNCED_FLUSH_ACTION_NAME, new SyncedFlushRequest(shard.shardId(), syncId, expectedCommitId), - new BaseTransportResponseHandler() { + transportService.sendRequest(node, SYNCED_FLUSH_ACTION_NAME, new ShardSyncedFlushRequest(shard.shardId(), syncId, expectedCommitId), + new BaseTransportResponseHandler() { @Override - public SyncedFlushResponse newInstance() { - return new SyncedFlushResponse(); + public ShardSyncedFlushResponse newInstance() { + return new ShardSyncedFlushResponse(); } @Override - public void handleResponse(SyncedFlushResponse response) { - SyncedFlushResponse existing = results.put(shard, response); + public void handleResponse(ShardSyncedFlushResponse response) { + ShardSyncedFlushResponse existing = results.put(shard, response); assert existing == null : "got two answers for node [" + node + "]"; // count after the assert so we won't decrement twice in handleException contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); @@ -332,7 +332,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL @Override public void handleException(TransportException exp) { logger.trace("{} error while performing synced flush on [{}], skipping", exp, shardId, shard); - results.put(shard, new SyncedFlushResponse(exp.getMessage())); + results.put(shard, new ShardSyncedFlushResponse(exp.getMessage())); contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); } @@ -346,7 +346,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } private void contDownAndSendResponseIfDone(String syncId, List shards, ShardId shardId, int totalShards, - ActionListener listener, CountDown countDown, Map results) { + ActionListener listener, CountDown countDown, Map results) { if (countDown.countDown()) { assert results.size() == shards.size(); listener.onResponse(new ShardsSyncedFlushResult(shardId, syncId, totalShards, results)); @@ -369,7 +369,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } continue; } - transportService.sendRequest(node, PRE_SYNCED_FLUSH_ACTION_NAME, new PreSyncedFlushRequest(shard.shardId()), new BaseTransportResponseHandler() { + transportService.sendRequest(node, PRE_SYNCED_FLUSH_ACTION_NAME, new PreShardSyncedFlushRequest(shard.shardId()), new BaseTransportResponseHandler() { @Override public PreSyncedFlushResponse newInstance() { return new PreSyncedFlushResponse(); @@ -401,7 +401,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } } - private PreSyncedFlushResponse performPreSyncedFlush(PreSyncedFlushRequest request) { + private PreSyncedFlushResponse performPreSyncedFlush(PreShardSyncedFlushRequest request) { IndexShard indexShard = indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id()); FlushRequest flushRequest = new FlushRequest().force(false).waitIfOngoing(true); logger.trace("{} performing pre sync flush", request.shardId()); @@ -410,7 +410,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL return new PreSyncedFlushResponse(commitId); } - private SyncedFlushResponse performSyncedFlush(SyncedFlushRequest request) { + private ShardSyncedFlushResponse performSyncedFlush(ShardSyncedFlushRequest request) { IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); IndexShard indexShard 
= indexService.getShard(request.shardId().id()); logger.trace("{} performing sync flush. sync id [{}], expected commit id {}", request.shardId(), request.syncId(), request.expectedCommitId()); @@ -418,11 +418,11 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL logger.trace("{} sync flush done. sync id [{}], result [{}]", request.shardId(), request.syncId(), result); switch (result) { case SUCCESS: - return new SyncedFlushResponse(); + return new ShardSyncedFlushResponse(); case COMMIT_MISMATCH: - return new SyncedFlushResponse("commit has changed"); + return new ShardSyncedFlushResponse("commit has changed"); case PENDING_OPERATIONS: - return new SyncedFlushResponse("pending operations"); + return new ShardSyncedFlushResponse("pending operations"); default: throw new ElasticsearchException("unknown synced flush result [" + result + "]"); } @@ -439,19 +439,19 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL return new InFlightOpsResponse(opCount); } - public final static class PreSyncedFlushRequest extends TransportRequest { + public final static class PreShardSyncedFlushRequest extends TransportRequest { private ShardId shardId; - public PreSyncedFlushRequest() { + public PreShardSyncedFlushRequest() { } - public PreSyncedFlushRequest(ShardId shardId) { + public PreShardSyncedFlushRequest(ShardId shardId) { this.shardId = shardId; } @Override public String toString() { - return "PreSyncedFlushRequest{" + + return "PreShardSyncedFlushRequest{" + "shardId=" + shardId + '}'; } @@ -504,16 +504,16 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } } - public static final class SyncedFlushRequest extends TransportRequest { + public static final class ShardSyncedFlushRequest extends TransportRequest { private String syncId; private Engine.CommitId expectedCommitId; private ShardId shardId; - public SyncedFlushRequest() { + public ShardSyncedFlushRequest() { } - public SyncedFlushRequest(ShardId shardId, String syncId, Engine.CommitId expectedCommitId) { + public ShardSyncedFlushRequest(ShardId shardId, String syncId, Engine.CommitId expectedCommitId) { this.expectedCommitId = expectedCommitId; this.shardId = shardId; this.syncId = syncId; @@ -549,7 +549,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL @Override public String toString() { - return "SyncedFlushRequest{" + + return "ShardSyncedFlushRequest{" + "shardId=" + shardId + ",syncId='" + syncId + '\'' + '}'; @@ -559,18 +559,18 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL /** * Response for third step of synced flush (writing the sync id) for one shard copy */ - public static final class SyncedFlushResponse extends TransportResponse { + public static final class ShardSyncedFlushResponse extends TransportResponse { /** * a non null value indicates a failure to sync flush. 
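The renames above make the shard-level scope of these messages explicit: one request/response pair travels per shard copy, and the response defined next carries a single failureReason field where null means the copy wrote the sync id successfully. A dependency-free sketch of how such per-copy answers roll up into an overall result (all names here are illustrative stand-ins, not the Elasticsearch types):

```java
import java.util.Map;

// Illustrative stand-in for the renamed per-shard-copy response; not the real Elasticsearch class.
final class ShardCopyResponse {
    final String failureReason; // null means the copy wrote the sync id successfully

    ShardCopyResponse() { this(null); }
    ShardCopyResponse(String failureReason) { this.failureReason = failureReason; }

    boolean success() { return failureReason == null; }
}

final class SyncedFlushTally {
    /** Summarizes once every copy has answered, mirroring the countDown-then-respond flow above. */
    static String summarize(Map<String, ShardCopyResponse> responsesByCopy, int totalCopies) {
        long successful = responsesByCopy.values().stream().filter(ShardCopyResponse::success).count();
        return successful + "/" + totalCopies + " copies sync-flushed";
    }

    public static void main(String[] args) {
        Map<String, ShardCopyResponse> responses = Map.of(
                "copy-0", new ShardCopyResponse(),                      // success
                "copy-1", new ShardCopyResponse("pending operations")); // failed, ops still in flight
        System.out.println(summarize(responses, 2)); // prints: 1/2 copies sync-flushed
    }
}
```

The failure strings mirror the cases handled in performSyncedFlush above: a changed commit or pending operations leave failureReason non-null.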
null means success */ String failureReason; - public SyncedFlushResponse() { + public ShardSyncedFlushResponse() { failureReason = null; } - public SyncedFlushResponse(String failureReason) { + public ShardSyncedFlushResponse(String failureReason) { this.failureReason = failureReason; } @@ -596,11 +596,17 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL @Override public String toString() { - return "SyncedFlushResponse{" + + return "ShardSyncedFlushResponse{" + "success=" + success() + ", failureReason='" + failureReason + '\'' + '}'; } + + public static ShardSyncedFlushResponse readSyncedFlushResponse(StreamInput in) throws IOException { + ShardSyncedFlushResponse shardSyncedFlushResponse = new ShardSyncedFlushResponse(); + shardSyncedFlushResponse.readFrom(in); + return shardSyncedFlushResponse; + } } @@ -677,18 +683,18 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } } - private final class PreSyncedFlushTransportHandler implements TransportRequestHandler<PreSyncedFlushRequest> { + private final class PreSyncedFlushTransportHandler implements TransportRequestHandler<PreShardSyncedFlushRequest> { @Override - public void messageReceived(PreSyncedFlushRequest request, TransportChannel channel) throws Exception { + public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel channel) throws Exception { channel.sendResponse(performPreSyncedFlush(request)); } } - private final class SyncedFlushTransportHandler implements TransportRequestHandler<SyncedFlushRequest> { + private final class SyncedFlushTransportHandler implements TransportRequestHandler<ShardSyncedFlushRequest> { @Override - public void messageReceived(SyncedFlushRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ShardSyncedFlushRequest request, TransportChannel channel) throws Exception { channel.sendResponse(performSyncedFlush(request)); } } diff --git a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java index f63534e1ac0..56166134df5 100644 --- a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java +++ b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java @@ -19,12 +19,6 @@ package org.elasticsearch.indices.memory; -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.ReentrantLock; - import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -41,6 +35,17 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.PriorityQueue; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReentrantLock; + public class IndexingMemoryController extends AbstractLifecycleComponent { /** How much heap (% or bytes) we will share across all actively indexing shards on this node (default: 10%).
*/ diff --git a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java index 0cec415d63b..08b7b34e91a 100644 --- a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java @@ -21,6 +21,7 @@ package org.elasticsearch.indices.query; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.EmptyQueryBuilder; @@ -40,11 +41,12 @@ public class IndicesQueriesRegistry extends AbstractComponent { public IndicesQueriesRegistry(Settings settings, Set injectedQueryParsers, NamedWriteableRegistry namedWriteableRegistry) { super(settings); Map> queryParsers = new HashMap<>(); - for (QueryParser queryParser : injectedQueryParsers) { + for (@SuppressWarnings("unchecked") QueryParser queryParser : injectedQueryParsers) { for (String name : queryParser.names()) { queryParsers.put(name, queryParser); } - namedWriteableRegistry.registerPrototype(QueryBuilder.class, queryParser.getBuilderPrototype()); + @SuppressWarnings("unchecked") NamedWriteable qb = queryParser.getBuilderPrototype(); + namedWriteableRegistry.registerPrototype(QueryBuilder.class, qb); } // EmptyQueryBuilder is not registered as query parser but used internally. // We need to register it with the NamedWriteableRegistry in order to serialize it @@ -58,4 +60,4 @@ public class IndicesQueriesRegistry extends AbstractComponent { public Map> queryParsers() { return queryParsers; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 6db38d59e85..c86309db136 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -23,61 +23,43 @@ import org.apache.lucene.store.RateLimiter; import org.apache.lucene.store.RateLimiter.SimpleRateLimiter; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.node.settings.NodeSettingsService; -import org.elasticsearch.threadpool.ThreadPool; -import java.io.Closeable; -import java.util.Objects; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; +public class RecoverySettings extends AbstractComponent { -/** - */ -public class RecoverySettings extends AbstractComponent implements Closeable { - - public static final String INDICES_RECOVERY_CONCURRENT_STREAMS = "indices.recovery.concurrent_streams"; - public static final String INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS = "indices.recovery.concurrent_small_file_streams"; - public static final String INDICES_RECOVERY_MAX_BYTES_PER_SEC = 
"indices.recovery.max_bytes_per_sec"; + public static final Setting INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), true, Setting.Scope.CLUSTER); /** * how long to wait before retrying after issues cause by cluster state syncing between nodes * i.e., local node is not yet known on remote node, remote shard not yet started etc. */ - public static final String INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC = "indices.recovery.retry_delay_state_sync"; + public static final Setting INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), true, Setting.Scope.CLUSTER); /** how long to wait before retrying after network related issues */ - public static final String INDICES_RECOVERY_RETRY_DELAY_NETWORK = "indices.recovery.retry_delay_network"; - - /** - * recoveries that don't show any activity for more then this interval will be failed. - * defaults to `indices.recovery.internal_action_long_timeout` - */ - public static final String INDICES_RECOVERY_ACTIVITY_TIMEOUT = "indices.recovery.recovery_activity_timeout"; + public static final Setting INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), true, Setting.Scope.CLUSTER); /** timeout value to use for requests made as part of the recovery process */ - public static final String INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT = "indices.recovery.internal_action_timeout"; + public static final Setting INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), true, Setting.Scope.CLUSTER); /** * timeout value to use for requests made as part of the recovery process that are expected to take long time. * defaults to twice `indices.recovery.internal_action_timeout`. */ - public static final String INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT = "indices.recovery.internal_action_long_timeout"; + public static final Setting INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.internal_action_long_timeout", (s) -> TimeValue.timeValueMillis(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(s).millis() * 2).toString(), TimeValue.timeValueSeconds(0), true, Setting.Scope.CLUSTER); - - public static final long SMALL_FILE_CUTOFF_BYTES = ByteSizeValue.parseBytesSizeValue("5mb", "SMALL_FILE_CUTOFF_BYTES").bytes(); + /** + * recoveries that don't show any activity for more then this interval will be failed. 
+ * defaults to `indices.recovery.internal_action_long_timeout` + */ + public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.recovery_activity_timeout", (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), true, Setting.Scope.CLUSTER); public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512, ByteSizeUnit.KB); - private volatile int concurrentStreams; - private volatile int concurrentSmallFileStreams; - private final ThreadPoolExecutor concurrentStreamPool; - private final ThreadPoolExecutor concurrentSmallFileStreamPool; - private volatile ByteSizeValue maxBytesPerSec; private volatile SimpleRateLimiter rateLimiter; private volatile TimeValue retryDelayStateSync; @@ -89,55 +71,35 @@ public class RecoverySettings extends AbstractComponent implements Closeable { private volatile ByteSizeValue chunkSize = DEFAULT_CHUNK_SIZE; @Inject - public RecoverySettings(Settings settings, NodeSettingsService nodeSettingsService) { + public RecoverySettings(Settings settings, ClusterSettings clusterSettings) { super(settings); - this.retryDelayStateSync = settings.getAsTime(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, TimeValue.timeValueMillis(500)); + this.retryDelayStateSync = INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.get(settings); // doesn't have to be fast as nodes are reconnected every 10s by default (see InternalClusterService.ReconnectToNodes) // and we want to give the master time to remove a faulty node - this.retryDelayNetwork = settings.getAsTime(INDICES_RECOVERY_RETRY_DELAY_NETWORK, TimeValue.timeValueSeconds(5)); + this.retryDelayNetwork = INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.get(settings); - this.internalActionTimeout = settings.getAsTime(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, TimeValue.timeValueMinutes(15)); - this.internalActionLongTimeout = settings.getAsTime(INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, new TimeValue(internalActionTimeout.millis() * 2)); + this.internalActionTimeout = INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(settings); + this.internalActionLongTimeout = INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.get(settings); - this.activityTimeout = settings.getAsTime(INDICES_RECOVERY_ACTIVITY_TIMEOUT, - // default to the internalActionLongTimeout used as timeouts on RecoverySource - internalActionLongTimeout - ); + this.activityTimeout = INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.get(settings); - - this.concurrentStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_STREAMS, 3); - this.concurrentStreamPool = EsExecutors.newScaling("recovery_stream", 0, concurrentStreams, 60, TimeUnit.SECONDS, - EsExecutors.daemonThreadFactory(settings, "[recovery_stream]")); - this.concurrentSmallFileStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 2); - this.concurrentSmallFileStreamPool = EsExecutors.newScaling("small_file_recovery_stream", 0, concurrentSmallFileStreams, 60, - TimeUnit.SECONDS, EsExecutors.daemonThreadFactory(settings, "[small_file_recovery_stream]")); - - this.maxBytesPerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, new ByteSizeValue(40, ByteSizeUnit.MB)); + this.maxBytesPerSec = INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.get(settings); if (maxBytesPerSec.bytes() <= 0) { rateLimiter = null; } else { rateLimiter = new SimpleRateLimiter(maxBytesPerSec.mbFrac()); } - logger.debug("using max_bytes_per_sec[{}], concurrent_streams [{}]", - maxBytesPerSec, concurrentStreams); - 
nodeSettingsService.addListener(new ApplySettings()); - } + logger.debug("using max_bytes_per_sec[{}]", maxBytesPerSec); - @Override - public void close() { - ThreadPool.terminate(concurrentStreamPool, 1, TimeUnit.SECONDS); - ThreadPool.terminate(concurrentSmallFileStreamPool, 1, TimeUnit.SECONDS); - } - - public ThreadPoolExecutor concurrentStreamPool() { - return concurrentStreamPool; - } - - public ThreadPoolExecutor concurrentSmallFileStreamPool() { - return concurrentSmallFileStreamPool; + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, this::setMaxBytesPerSec); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, this::setRetryDelayStateSync); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, this::setRetryDelayNetwork); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING, this::setInternalActionTimeout); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING, this::setInternalActionLongTimeout); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, this::setActivityTimeout); } public RateLimiter rateLimiter() { @@ -174,50 +136,34 @@ public class RecoverySettings extends AbstractComponent implements Closeable { } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - ByteSizeValue maxSizePerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.this.maxBytesPerSec); - if (!Objects.equals(maxSizePerSec, RecoverySettings.this.maxBytesPerSec)) { - logger.info("updating [{}] from [{}] to [{}]", INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.this.maxBytesPerSec, maxSizePerSec); - RecoverySettings.this.maxBytesPerSec = maxSizePerSec; - if (maxSizePerSec.bytes() <= 0) { - rateLimiter = null; - } else if (rateLimiter != null) { - rateLimiter.setMBPerSec(maxSizePerSec.mbFrac()); - } else { - rateLimiter = new SimpleRateLimiter(maxSizePerSec.mbFrac()); - } - } + public void setRetryDelayStateSync(TimeValue retryDelayStateSync) { + this.retryDelayStateSync = retryDelayStateSync; + } - int concurrentStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_STREAMS, RecoverySettings.this.concurrentStreams); - if (concurrentStreams != RecoverySettings.this.concurrentStreams) { - logger.info("updating [indices.recovery.concurrent_streams] from [{}] to [{}]", RecoverySettings.this.concurrentStreams, concurrentStreams); - RecoverySettings.this.concurrentStreams = concurrentStreams; - RecoverySettings.this.concurrentStreamPool.setMaximumPoolSize(concurrentStreams); - } + public void setRetryDelayNetwork(TimeValue retryDelayNetwork) { + this.retryDelayNetwork = retryDelayNetwork; + } - int concurrentSmallFileStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, RecoverySettings.this.concurrentSmallFileStreams); - if (concurrentSmallFileStreams != RecoverySettings.this.concurrentSmallFileStreams) { - logger.info("updating [indices.recovery.concurrent_small_file_streams] from [{}] to [{}]", RecoverySettings.this.concurrentSmallFileStreams, concurrentSmallFileStreams); - RecoverySettings.this.concurrentSmallFileStreams = concurrentSmallFileStreams; - RecoverySettings.this.concurrentSmallFileStreamPool.setMaximumPoolSize(concurrentSmallFileStreams); - } + public void setActivityTimeout(TimeValue activityTimeout) { + this.activityTimeout = 
activityTimeout; + } - RecoverySettings.this.retryDelayNetwork = maybeUpdate(RecoverySettings.this.retryDelayNetwork, settings, INDICES_RECOVERY_RETRY_DELAY_NETWORK); - RecoverySettings.this.retryDelayStateSync = maybeUpdate(RecoverySettings.this.retryDelayStateSync, settings, INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC); - RecoverySettings.this.activityTimeout = maybeUpdate(RecoverySettings.this.activityTimeout, settings, INDICES_RECOVERY_ACTIVITY_TIMEOUT); - RecoverySettings.this.internalActionTimeout = maybeUpdate(RecoverySettings.this.internalActionTimeout, settings, INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT); - RecoverySettings.this.internalActionLongTimeout = maybeUpdate(RecoverySettings.this.internalActionLongTimeout, settings, INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT); - } + public void setInternalActionTimeout(TimeValue internalActionTimeout) { + this.internalActionTimeout = internalActionTimeout; + } - private TimeValue maybeUpdate(final TimeValue currentValue, final Settings settings, final String key) { - final TimeValue value = settings.getAsTime(key, currentValue); - if (value.equals(currentValue)) { - return currentValue; - } - logger.info("updating [] from [{}] to [{}]", key, currentValue, value); - return value; + public void setInternalActionLongTimeout(TimeValue internalActionLongTimeout) { + this.internalActionLongTimeout = internalActionLongTimeout; + } + + private void setMaxBytesPerSec(ByteSizeValue maxBytesPerSec) { + this.maxBytesPerSec = maxBytesPerSec; + if (maxBytesPerSec.bytes() <= 0) { + rateLimiter = null; + } else if (rateLimiter != null) { + rateLimiter.setMBPerSec(maxBytesPerSec.mbFrac()); + } else { + rateLimiter = new SimpleRateLimiter(maxBytesPerSec.mbFrac()); } } } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java index 80c18ef3d63..c0270e71721 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java @@ -37,7 +37,12 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * The source recovery accepts recovery requests from other peer shards and start the recovery process from this diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 4057af00841..94c78efccd8 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -36,13 +36,14 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; -import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.util.CancellableThreads.Interruptable; import org.elasticsearch.index.engine.RecoveryEngineException; -import 
org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IllegalIndexShardStateException; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardClosedException; +import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.index.translog.Translog; @@ -57,10 +58,6 @@ import java.io.OutputStream; import java.util.ArrayList; import java.util.Comparator; import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.stream.StreamSupport; @@ -69,6 +66,12 @@ import java.util.stream.StreamSupport; * RecoverySourceHandler handles the three phases of shard recovery, which is * everything relating to copying the segment files as well as sending translog * operations across the wire once the segments have been copied. + * + * Note: There is always one source handler per recovery that handles all the + * file and translog transfer. This handler is completely isolated from other recoveries + * while the {@link RateLimiter} passed via {@link RecoverySettings} is shared across recoveries + * originating from this node to throttle the number of bytes sent during file transfer. The transaction log + * phase bypasses the rate limiter entirely. */ public class RecoverySourceHandler { @@ -458,10 +461,6 @@ public class RecoverySourceHandler { // index docs to replicas while the index files are recovered // the lock can potentially be removed, in which case, it might // make sense to re-enable throttling in this phase -// if (recoverySettings.rateLimiter() != null) { -// recoverySettings.rateLimiter().pause(size); -// } - cancellableThreads.execute(() -> { final RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest( request.recoveryId(), request.shardId(), operations, snapshot.estimatedTotalOperations()); @@ -554,6 +553,7 @@ public class RecoverySourceHandler { cancellableThreads.execute(() -> { // Pause using the rate limiter, if desired, to throttle the recovery final long throttleTimeInNanos; + // always fetch the rate limiter - it might be updated in real-time on the recovery settings final RateLimiter rl = recoverySettings.rateLimiter(); if (rl != null) { long bytes = bytesSinceLastPause.addAndGet(content.length()); @@ -591,100 +591,38 @@ public class RecoverySourceHandler { void sendFiles(Store store, StoreFileMetaData[] files, Function outputStreamFactory) throws Throwable { store.incRef(); try { - Future[] runners = asyncSendFiles(store, files, outputStreamFactory); - IOException corruptedEngine = null; - final List exceptions = new ArrayList<>(); - for (int i = 0; i < runners.length; i++) { - StoreFileMetaData md = files[i]; - try { - runners[i].get(); - } catch (ExecutionException t) { - corruptedEngine = handleExecutionException(store, corruptedEngine, exceptions, md, t.getCause()); - } catch (InterruptedException t) { - corruptedEngine = handleExecutionException(store, corruptedEngine, exceptions, md, t); + ArrayUtil.timSort(files, (a,b) -> Long.compare(a.length(), b.length())); // send smallest first + for (int i = 0; i < files.length; i++) { + final StoreFileMetaData md = files[i]; + try (final IndexInput indexInput =
store.directory().openInput(md.name(), IOContext.READONCE)) { + // it's fine that we only have the indexInput in the try-with-resources block. The copy method handles + // exceptions during close correctly and doesn't hide the original exception. + Streams.copy(new InputStreamIndexInput(indexInput, md.length()), outputStreamFactory.apply(md)); + } catch (Throwable t) { + final IOException corruptIndexException; + if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(t)) != null) { + if (store.checkIntegrityNoException(md) == false) { // we are corrupted on the primary -- fail! + logger.warn("{} Corrupted file detected {} checksum mismatch", shardId, md); + failEngine(corruptIndexException); + throw corruptIndexException; + } else { // corruption has happened on the way to replica + RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but checksums are ok", null); + exception.addSuppressed(t); + logger.warn("{} Remote file corruption on node {}, recovering {}. local checksum OK", + corruptIndexException, shardId, request.targetNode(), md); + throw exception; + } + } else { + throw t; + } } } - if (corruptedEngine != null) { - failEngine(corruptedEngine); - throw corruptedEngine; - } else { - ExceptionsHelper.rethrowAndSuppress(exceptions); - } } finally { store.decRef(); } } - private IOException handleExecutionException(Store store, IOException corruptedEngine, List exceptions, StoreFileMetaData md, Throwable t) { - logger.debug("Failed to transfer file [" + md + "] on recovery"); - final IOException corruptIndexException; - final boolean checkIntegrity = corruptedEngine == null; - if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(t)) != null) { - if (checkIntegrity && store.checkIntegrityNoException(md) == false) { // we are corrupted on the primary -- fail! - logger.warn("{} Corrupted file detected {} checksum mismatch", shardId, md); - corruptedEngine = corruptIndexException; - } else { // corruption has happened on the way to replica - RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but checksums are ok", null); - exception.addSuppressed(t); - if (checkIntegrity) { - logger.warn("{} Remote file corruption on node {}, recovering {}. local checksum OK", - corruptIndexException, shardId, request.targetNode(), md); - } else { - logger.warn("{} Remote file corruption on node {}, recovering {}. local checksum are skipped", - corruptIndexException, shardId, request.targetNode(), md); - } - exceptions.add(exception); - - } - } else { - exceptions.add(t); - } - return corruptedEngine; - } protected void failEngine(IOException cause) { shard.failShard("recovery", cause); } - - Future[] asyncSendFiles(Store store, StoreFileMetaData[] files, Function outputStreamFactory) { - store.incRef(); - try { - final Future[] futures = new Future[files.length]; - for (int i = 0; i < files.length; i++) { - final StoreFileMetaData md = files[i]; - long fileSize = md.length(); - - // Files are split into two categories, files that are "small" - // (under 5mb) and other files. Small files are transferred - // using a separate thread pool dedicated to small files.
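The replacement loop above drops the old small-file/large-file thread pools (removed just below) in favor of a single pass that sorts files smallest-first and triages corruption: a failed local checksum fails the engine, while an error with an intact local copy is reported as corruption in transit. A hedged, dependency-free sketch of that triage using java.nio stand-ins for the Store/IndexInput machinery (all names hypothetical):

```java
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Comparator;

// Illustrative sketch of the sequential send loop; not the Elasticsearch RecoverySourceHandler.
final class FileSender {
    /** Thrown when the bytes were fine locally, so the corruption happened on the wire. */
    static final class RemoteCorruptionException extends IOException {
        RemoteCorruptionException(Throwable cause) { super("corrupted in transit, local checksum OK", cause); }
    }

    interface ChecksumVerifier { boolean localCopyIsIntact(Path file); }

    static void sendFiles(Path[] files, OutputStream out, ChecksumVerifier verifier) throws IOException {
        // Smallest first, so tiny segment files are not stuck behind a multi-gigabyte merge output.
        Arrays.sort(files, Comparator.comparingLong(f -> f.toFile().length()));
        for (Path file : files) {
            try {
                Files.copy(file, out);
            } catch (IOException e) {
                if (verifier.localCopyIsIntact(file)) {
                    throw new RemoteCorruptionException(e); // blame the transport, keep the shard alive
                }
                throw e; // local data is bad: the caller should fail the shard, as failEngine() does above
            }
        }
    }

    public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempFile("seg", ".bin");
        Files.write(tmp, new byte[]{1, 2, 3});
        sendFiles(new Path[]{tmp}, OutputStream.nullOutputStream(), file -> true);
        System.out.println("sent " + tmp);
    }
}
```

Sorting smallest-first preserves the property the removed thread-pool split was originally buying: tiny segments of a brand-new index can recover while a large transfer is still in flight.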
- // - // The idea behind this is that while we are transferring an - // older, large index, a user may create a new index, but that - // index will not be able to recover until the large index - // finishes, by using two different thread pools we can allow - // tiny files (like segments for a brand new index) to be - // recovered while ongoing large segment recoveries are - // happening. It also allows these pools to be configured - // separately. - ThreadPoolExecutor pool; - if (fileSize > RecoverySettings.SMALL_FILE_CUTOFF_BYTES) { - pool = recoverySettings.concurrentStreamPool(); - } else { - pool = recoverySettings.concurrentSmallFileStreamPool(); - } - Future future = pool.submit(() -> { - try (final IndexInput indexInput = store.directory().openInput(md.name(), IOContext.READONCE)) { - // it's fine that we are only having the indexInput int he try/with block. The copy methods handles - // exceptions during close correctly and doesn't hide the original exception. - Streams.copy(new InputStreamIndexInput(indexInput, md.length()), outputStreamFactory.apply(md)); - } - return null; - }); - futures[i] = future; - } - return futures; - } finally { - store.decRef(); - } - } } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 32e644ab7b8..f7e683b8f14 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -45,10 +45,21 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.RecoveryEngineException; import org.elasticsearch.index.mapper.MapperException; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IllegalIndexShardStateException; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardClosedException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; +import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.store.Store; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.FutureTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Arrays; diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 45f2f91b0be..4a76d262130 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -19,7 +19,13 @@ package org.elasticsearch.indices.store; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import 
org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -39,7 +45,13 @@ import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; import java.io.Closeable; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index d963ea24303..65902b443e9 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -43,8 +43,8 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; @@ -55,7 +55,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReferenceArray; diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index f095cc355ef..4f48e4f7b93 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -36,20 +36,21 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; 
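The IndicesTTLService diff just below applies the same Setting conversion to indices.ttl.interval: the purger thread keeps running while a registered consumer swaps the interval it sleeps on. A self-contained sketch of a background loop picking up such a dynamically updated value (toy names, not the actual PurgerThread):

```java
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

// Toy model of a background purger whose wake-up interval can be updated dynamically.
final class DynamicIntervalLoop implements Runnable {
    private final AtomicLong intervalMillis = new AtomicLong(60_000); // default: 60s, like indices.ttl.interval

    /** Called by the settings-update consumer; the next sleep uses the new value. */
    void setInterval(long millis) {
        if (millis <= 0) throw new IllegalArgumentException("interval must be positive");
        intervalMillis.set(millis);
    }

    @Override
    public void run() {
        try {
            while (!Thread.currentThread().isInterrupted()) {
                // purgeShards() would run here
                TimeUnit.MILLISECONDS.sleep(intervalMillis.get());
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // shutting down
        }
    }

    public static void main(String[] args) throws Exception {
        DynamicIntervalLoop loop = new DynamicIntervalLoop();
        Thread t = new Thread(loop, "toy-ttl-purger");
        t.start();
        loop.setInterval(1_000); // a dynamic cluster-settings update would land here
        t.interrupt();
        t.join();
    }
}
```

The positive-value check stands in for the validation a positiveTimeSetting would perform before any consumer is invoked.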
-import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.node.settings.NodeSettingsService; import java.io.IOException; import java.util.ArrayList; @@ -66,7 +67,7 @@ import java.util.concurrent.locks.ReentrantLock; */ public class IndicesTTLService extends AbstractLifecycleComponent { - public static final String INDICES_TTL_INTERVAL = "indices.ttl.interval"; + public static final Setting INDICES_TTL_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); public static final String INDEX_TTL_DISABLE_PURGE = "index.ttl.disable_purge"; private final ClusterService clusterService; @@ -77,16 +78,15 @@ public class IndicesTTLService extends AbstractLifecycleComponent shardsToPurge) { for (IndexShard shardToPurge : shardsToPurge) { - Query query = shardToPurge.mapperService().smartNameFieldType(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true); + Query query = shardToPurge.mapperService().fullName(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true); Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl"); try { logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id()); @@ -310,20 +310,6 @@ public class IndicesTTLService extends AbstractLifecycleComponent clazz = + (Class)Class.forName("com.sun.management.HotSpotDiagnosticMXBean"); + Class vmOptionClazz = Class.forName("com.sun.management.VMOption"); + PlatformManagedObject hotSpotDiagnosticMXBean = ManagementFactory.getPlatformMXBean(clazz); + Method vmOptionMethod = clazz.getMethod("getVMOption", String.class); + Object useCompressedOopsVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops"); + Method valueMethod = vmOptionClazz.getMethod("getValue"); + info.useCompressedOops = (String)valueMethod.invoke(useCompressedOopsVmOption); + } catch (Throwable t) { + // unable to deduce the state of compressed oops + info.useCompressedOops = "unknown"; + } + INSTANCE = info; } @@ -135,6 +156,8 @@ public class JvmInfo implements Streamable, ToXContent { String[] gcCollectors = Strings.EMPTY_ARRAY; String[] memoryPools = Strings.EMPTY_ARRAY; + private String useCompressedOops; + private JvmInfo() { } @@ -258,6 +281,18 @@ public class JvmInfo implements Streamable, ToXContent { return this.systemProperties; } + /** + * The value of the JVM flag UseCompressedOops, if available otherwise + * "unknown". The value "unknown" indicates that an attempt was + * made to obtain the value of the flag on this JVM and the attempt + * failed. 
+ * + * @return the value of the JVM flag UseCompressedOops or "unknown" + */ + public String useCompressedOops() { + return this.useCompressedOops; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Fields.JVM); @@ -279,6 +314,8 @@ public class JvmInfo implements Streamable, ToXContent { builder.field(Fields.GC_COLLECTORS, gcCollectors); builder.field(Fields.MEMORY_POOLS, memoryPools); + builder.field(Fields.USING_COMPRESSED_OOPS, useCompressedOops); + builder.endObject(); return builder; } @@ -306,6 +343,7 @@ public class JvmInfo implements Streamable, ToXContent { static final XContentBuilderString DIRECT_MAX_IN_BYTES = new XContentBuilderString("direct_max_in_bytes"); static final XContentBuilderString GC_COLLECTORS = new XContentBuilderString("gc_collectors"); static final XContentBuilderString MEMORY_POOLS = new XContentBuilderString("memory_pools"); + static final XContentBuilderString USING_COMPRESSED_OOPS = new XContentBuilderString("using_compressed_ordinary_object_pointers"); } public static JvmInfo readJvmInfo(StreamInput in) throws IOException { @@ -337,6 +375,7 @@ public class JvmInfo implements Streamable, ToXContent { mem.readFrom(in); gcCollectors = in.readStringArray(); memoryPools = in.readStringArray(); + useCompressedOops = in.readString(); } @Override @@ -361,6 +400,7 @@ public class JvmInfo implements Streamable, ToXContent { mem.writeTo(out); out.writeStringArray(gcCollectors); out.writeStringArray(memoryPools); + out.writeString(useCompressedOops); } public static class Mem implements Streamable { diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java index c695e265ac5..276ef537bc8 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java @@ -29,7 +29,15 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import java.io.IOException; -import java.lang.management.*; +import java.lang.management.BufferPoolMXBean; +import java.lang.management.ClassLoadingMXBean; +import java.lang.management.GarbageCollectorMXBean; +import java.lang.management.ManagementFactory; +import java.lang.management.MemoryMXBean; +import java.lang.management.MemoryPoolMXBean; +import java.lang.management.MemoryUsage; +import java.lang.management.RuntimeMXBean; +import java.lang.management.ThreadMXBean; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java b/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java similarity index 68% rename from plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java rename to core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java index 5e7c4d3fa57..599755e78a4 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FileSystemFactory.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java @@ -16,13 +16,19 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.repositories.hdfs; -import java.io.IOException; +package org.elasticsearch.monitor.os; -import org.apache.hadoop.fs.FileSystem; +public class DummyOsInfo extends OsInfo { -interface FileSystemFactory { + DummyOsInfo() { + refreshInterval = 0; + availableProcessors = 0; + allocatedProcessors = 0; + name = "dummy_name"; + arch = "dummy_arch"; + version = "dummy_version"; + } - FileSystem getFileSystem() throws IOException; + public static final DummyOsInfo INSTANCE = new DummyOsInfo(); } diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java b/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java index f34cd51a143..d94447221c3 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java @@ -108,6 +108,9 @@ public class OsInfo implements Streamable, ToXContent { refreshInterval = in.readLong(); availableProcessors = in.readInt(); allocatedProcessors = in.readInt(); + name = in.readOptionalString(); + arch = in.readOptionalString(); + version = in.readOptionalString(); } @Override @@ -115,5 +118,8 @@ public class OsInfo implements Streamable, ToXContent { out.writeLong(refreshInterval); out.writeInt(availableProcessors); out.writeInt(allocatedProcessors); + out.writeOptionalString(name); + out.writeOptionalString(arch); + out.writeOptionalString(version); } } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index d3f6367cac0..a9651eace33 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -23,7 +23,6 @@ import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionModule; -import org.elasticsearch.bootstrap.Elasticsearch; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.client.node.NodeClientModule; @@ -33,7 +32,6 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Injector; @@ -46,6 +44,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; @@ -62,7 +61,6 @@ import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.GatewayModule; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.http.HttpServer; -import org.elasticsearch.http.HttpServerModule; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; @@ -78,7 +76,6 @@ import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.jvm.JvmInfo; import 
org.elasticsearch.node.internal.InternalSettingsPreparer; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.percolator.PercolatorModule; import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.plugins.Plugin; @@ -86,7 +83,6 @@ import org.elasticsearch.plugins.PluginsModule; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestModule; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; @@ -95,7 +91,6 @@ import org.elasticsearch.snapshots.SnapshotShardsService; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportService; import org.elasticsearch.tribe.TribeModule; import org.elasticsearch.tribe.TribeService; @@ -108,7 +103,6 @@ import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.charset.Charset; -import java.nio.file.CopyOption; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; @@ -170,7 +164,6 @@ public class Node implements Releasable { throw new IllegalStateException("Failed to created node environment", ex); } final NetworkService networkService = new NetworkService(settings); - final NodeSettingsService nodeSettingsService = new NodeSettingsService(settings); final SettingsFilter settingsFilter = new SettingsFilter(settings); final ThreadPool threadPool = new ThreadPool(settings); boolean success = false; @@ -185,20 +178,15 @@ public class Node implements Releasable { } modules.add(new PluginsModule(pluginsService)); modules.add(new SettingsModule(this.settings, settingsFilter)); - modules.add(new NodeModule(this, nodeSettingsService, monitorService)); - modules.add(new NetworkModule(networkService)); - modules.add(new ScriptModule(this.settings)); modules.add(new EnvironmentModule(environment)); + modules.add(new NodeModule(this, monitorService)); + modules.add(new NetworkModule(networkService, settings, false)); + modules.add(new ScriptModule(this.settings)); modules.add(new NodeEnvironmentModule(nodeEnvironment)); modules.add(new ClusterNameModule(this.settings)); modules.add(new ThreadPoolModule(threadPool)); modules.add(new DiscoveryModule(this.settings)); modules.add(new ClusterModule(this.settings)); - modules.add(new RestModule(this.settings)); - modules.add(new TransportModule(settings)); - if (settings.getAsBoolean(HTTP_ENABLED, true)) { - modules.add(new HttpServerModule(settings)); - } modules.add(new IndicesModule()); modules.add(new SearchModule()); modules.add(new ActionModule(false)); @@ -215,7 +203,7 @@ public class Node implements Releasable { injector = modules.createInjector(); client = injector.getInstance(Client.class); - threadPool.setNodeSettingsService(injector.getInstance(NodeSettingsService.class)); + threadPool.setClusterSettings(injector.getInstance(ClusterSettings.class)); success = true; } catch (IOException ex) { throw new ElasticsearchException("failed to bind service", ex); @@ -334,7 +322,6 @@ public class Node implements Releasable { for (Class plugin : pluginsService.nodeServices()) { injector.getInstance(plugin).stop(); } - 
injector.getInstance(RecoverySettings.class).close(); // we should stop this last since it waits for resources to get released // if we had scroll searchers etc or recovery going on we wait for to finish. injector.getInstance(IndicesService.class).stop(); diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java index 3641c325030..aa52d389340 100644 --- a/core/src/main/java/org/elasticsearch/node/NodeModule.java +++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java @@ -23,9 +23,7 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.monitor.MonitorService; -import org.elasticsearch.node.Node; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.node.settings.NodeSettingsService; /** * @@ -33,16 +31,14 @@ import org.elasticsearch.node.settings.NodeSettingsService; public class NodeModule extends AbstractModule { private final Node node; - private final NodeSettingsService nodeSettingsService; private final MonitorService monitorService; // pkg private so tests can mock Class pageCacheRecyclerImpl = PageCacheRecycler.class; Class bigArraysImpl = BigArrays.class; - public NodeModule(Node node, NodeSettingsService nodeSettingsService, MonitorService monitorService) { + public NodeModule(Node node, MonitorService monitorService) { this.node = node; - this.nodeSettingsService = nodeSettingsService; this.monitorService = monitorService; } @@ -60,7 +56,6 @@ public class NodeModule extends AbstractModule { } bind(Node.class).toInstance(node); - bind(NodeSettingsService.class).toInstance(nodeSettingsService); bind(MonitorService.class).toInstance(monitorService); bind(NodeService.class).asEagerSingleton(); } diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 7bede53e7ec..11db520ed7d 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -19,8 +19,6 @@ package org.elasticsearch.node.internal; -import java.nio.charset.StandardCharsets; - import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Booleans; @@ -35,6 +33,7 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/node/settings/NodeSettingsService.java b/core/src/main/java/org/elasticsearch/node/settings/NodeSettingsService.java deleted file mode 100644 index dbe6a33172b..00000000000 --- a/core/src/main/java/org/elasticsearch/node/settings/NodeSettingsService.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.node.settings; - -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.logging.ESLoggerFactory; -import org.elasticsearch.common.settings.Settings; - -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -/** - * A service that allows to register for node settings change that can come from cluster - * events holding new settings. - */ -public class NodeSettingsService extends AbstractComponent implements ClusterStateListener { - - private static volatile Settings globalSettings = Settings.Builder.EMPTY_SETTINGS; - - /** - * Returns the global (static) settings last updated by a node. Note, if you have multiple - * nodes on the same JVM, it will just return the latest one set... - */ - public static Settings getGlobalSettings() { - return globalSettings; - } - - private volatile Settings lastSettingsApplied; - - private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); - - @Inject - public NodeSettingsService(Settings settings) { - super(settings); - globalSettings = settings; - } - - // inject it as a member, so we won't get into possible cyclic problems - public void setClusterService(ClusterService clusterService) { - clusterService.add(this); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - // nothing to do until we actually recover from the gateway or any other block indicates we need to disable persistency - if (event.state().blocks().disableStatePersistence()) { - return; - } - - if (!event.metaDataChanged()) { - // nothing changed in the metadata, no need to check - return; - } - - if (lastSettingsApplied != null && event.state().metaData().settings().equals(lastSettingsApplied)) { - // nothing changed in the settings, ignore - return; - } - - for (Listener listener : listeners) { - try { - listener.onRefreshSettings(event.state().metaData().settings()); - } catch (Exception e) { - logger.warn("failed to refresh settings for [{}]", e, listener); - } - } - - try { - for (Map.Entry entry : event.state().metaData().settings().getAsMap().entrySet()) { - if (entry.getKey().startsWith("logger.")) { - String component = entry.getKey().substring("logger.".length()); - if ("_root".equals(component)) { - ESLoggerFactory.getRootLogger().setLevel(entry.getValue()); - } else { - ESLoggerFactory.getLogger(component).setLevel(entry.getValue()); - } - } - } - } catch (Exception e) { - logger.warn("failed to refresh settings for [{}]", e, "logger"); - } - - lastSettingsApplied = event.state().metaData().settings(); - globalSettings = lastSettingsApplied; - } - - /** - * Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically. 
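Everything deleted in this file is replaced by the typed ClusterSettings consumers seen earlier in this diff: instead of each listener re-reading the whole settings blob on every cluster-state change, a component now subscribes to exactly the keys it owns. A dependency-free sketch of the contrast (hypothetical names on both sides):

```java
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;

// Toy contrast between the removed untyped-listener style and the typed-consumer style.
final class SettingsStyles {
    // Old style: every listener saw the whole raw settings map and did its own parsing and diffing.
    interface Listener {
        void onRefreshSettings(Map<String, String> settings);
    }

    // New style: subscribe to one key with a parser; the consumer only fires for that key.
    static <T> void addSettingsUpdateConsumer(String key, Function<String, T> parser,
                                              Map<String, String> update, Consumer<T> consumer) {
        String raw = update.get(key);
        if (raw != null) {
            consumer.accept(parser.apply(raw)); // parse and validate once, centrally
        }
    }

    public static void main(String[] args) {
        Map<String, String> update = Map.of("indices.recovery.max_bytes_per_sec", "40mb");
        addSettingsUpdateConsumer("indices.recovery.max_bytes_per_sec", s -> s, update,
                v -> System.out.println("rate limit is now " + v));
    }
}
```

The typed form parses and validates once, centrally, which is what makes updates like the recovery rate limit safe to apply on the fly.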
- */ - public void addListener(Listener listener) { - this.listeners.add(listener); - } - - public void removeListener(Listener listener) { - this.listeners.remove(listener); - } - - public interface Listener { - void onRefreshSettings(Settings settings); - } -} diff --git a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java index 5d581178622..e1488ef8cde 100644 --- a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java +++ b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java @@ -22,7 +22,12 @@ package org.elasticsearch.percolator; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.index.*; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.MultiReader; +import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.CloseableThreadLocal; diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java index 70abaaaff3d..267ea7a50bf 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.percolator; import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; - import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; @@ -38,12 +37,11 @@ import org.elasticsearch.common.HasHeaders; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; @@ -74,6 +72,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -139,7 +138,7 @@ public class PercolateContext extends SearchContext { this.bigArrays = bigArrays.withCircuitBreaking(); this.querySearchResult = new QuerySearchResult(0, searchShardTarget); this.engineSearcher = indexShard.acquireSearcher("percolate"); - this.searcher = new ContextIndexSearcher(this, engineSearcher); + this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), 
indexShard.getQueryCachingPolicy()); this.scriptService = scriptService; this.numberOfShards = request.getNumberOfShards(); this.aliasFilter = aliasFilter; @@ -164,7 +163,7 @@ public class PercolateContext extends SearchContext { fields.put(field.name(), new InternalSearchHitField(field.name(), Collections.emptyList())); } hitContext().reset( - new InternalSearchHit(0, "unknown", new StringText(parsedDocument.type()), fields), + new InternalSearchHit(0, "unknown", new Text(parsedDocument.type()), fields), atomicReaderContext, 0, docSearcher.searcher() ); } @@ -638,12 +637,7 @@ public class PercolateContext extends SearchContext { @Override public MappedFieldType smartNameFieldType(String name) { - return mapperService().smartNameFieldType(name, types); - } - - @Override - public MappedFieldType smartNameFieldTypeFromAnyType(String name) { - return mapperService().smartNameFieldType(name); + return mapperService().fullName(name); } @Override @@ -748,5 +742,7 @@ public class PercolateContext extends SearchContext { } @Override - public QueryCache getQueryCache() { return indexService.cache().query();} + public Profilers getProfilers() { + throw new UnsupportedOperationException(); + } } diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index fa7b47766a8..8cc691b866b 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -19,7 +19,6 @@ package org.elasticsearch.percolator; import com.carrotsearch.hppc.IntObjectHashMap; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.memory.ExtendedMemoryIndex; @@ -52,8 +51,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.BytesText; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -533,10 +530,10 @@ public class PercolatorService extends AbstractComponent { List finalMatches = new ArrayList<>(requestedSize == 0 ? numMatches : requestedSize); outer: for (PercolateShardResponse response : shardResults) { - Text index = new StringText(response.getIndex()); + Text index = new Text(response.getIndex()); for (int i = 0; i < response.matches().length; i++) { float score = response.scores().length == 0 ? NO_SCORE : response.scores()[i]; - Text match = new BytesText(new BytesArray(response.matches()[i])); + Text match = new Text(new BytesArray(response.matches()[i])); Map hl = response.hls().isEmpty() ? null : response.hls().get(i); finalMatches.add(new PercolateResponse.Match(index, match, score, hl)); if (requestedSize != 0 && finalMatches.size() == requestedSize) { @@ -686,10 +683,10 @@ public class PercolatorService extends AbstractComponent { List finalMatches = new ArrayList<>(requestedSize); if (nonEmptyResponses == 1) { PercolateShardResponse response = shardResults.get(firstNonEmptyIndex); - Text index = new StringText(response.getIndex()); + Text index = new Text(response.getIndex()); for (int i = 0; i < response.matches().length; i++) { float score = response.scores().length == 0 ? 
Float.NaN : response.scores()[i]; - Text match = new BytesText(new BytesArray(response.matches()[i])); + Text match = new Text(new BytesArray(response.matches()[i])); if (!response.hls().isEmpty()) { Map hl = response.hls().get(i); finalMatches.add(new PercolateResponse.Match(index, match, score, hl)); @@ -728,8 +725,8 @@ public class PercolatorService extends AbstractComponent { slots[requestIndex]++; PercolateShardResponse shardResponse = shardResults.get(requestIndex); - Text index = new StringText(shardResponse.getIndex()); - Text match = new BytesText(new BytesArray(shardResponse.matches()[itemIndex])); + Text index = new Text(shardResponse.getIndex()); + Text match = new Text(new BytesArray(shardResponse.matches()[itemIndex])); float score = shardResponse.scores()[itemIndex]; if (!shardResponse.hls().isEmpty()) { Map hl = shardResponse.hls().get(itemIndex); @@ -763,7 +760,7 @@ public class PercolatorService extends AbstractComponent { hls = new ArrayList<>(topDocs.scoreDocs.length); } - final MappedFieldType uidMapper = context.mapperService().smartNameFieldType(UidFieldMapper.NAME); + final MappedFieldType uidMapper = context.mapperService().fullName(UidFieldMapper.NAME); final IndexFieldData uidFieldData = context.fieldData().getForField(uidMapper); int i = 0; for (ScoreDoc scoreDoc : topDocs.scoreDocs) { diff --git a/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java b/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java index 094201c6184..828ff4f08e4 100644 --- a/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java +++ b/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java @@ -19,10 +19,16 @@ package org.elasticsearch.percolator; import com.carrotsearch.hppc.FloatArrayList; - import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.*; import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.SimpleCollector; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopScoreDocCollector; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; @@ -67,7 +73,7 @@ abstract class QueryCollector extends SimpleCollector { this.logger = logger; this.queries = context.percolateQueries(); this.searcher = context.docSearcher(); - final MappedFieldType uidMapper = context.mapperService().smartNameFieldType(UidFieldMapper.NAME); + final MappedFieldType uidMapper = context.mapperService().fullName(UidFieldMapper.NAME); this.uidFieldData = context.fieldData().getForField(uidMapper); this.isNestedDoc = isNestedDoc; diff --git a/core/src/main/java/org/elasticsearch/cluster/settings/ClusterDynamicSettings.java b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java similarity index 60% rename from core/src/main/java/org/elasticsearch/cluster/settings/ClusterDynamicSettings.java rename to core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java index b537c448bea..a57a96c631d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/settings/ClusterDynamicSettings.java +++ b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java @@ -16,23 +16,13 @@ * specific language governing permissions and limitations * under the License. 
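Stepping back to the percolator changes above: the StringText/BytesText subclasses are collapsed into the single concrete Text, which can be built from either representation and converts lazily between them. A minimal sketch of the unified type, assuming the conversion accessors behave as their names suggest:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;

public class TextUnificationSketch {
    public static void main(String[] args) {
        Text fromString = new Text("my-index");             // replaces new StringText(...)
        Text fromBytes = new Text(new BytesArray("doc-1")); // replaces new BytesText(...)
        String s = fromBytes.string();                      // bytes -> string conversion on demand
        BytesReference b = fromString.bytes();              // string -> bytes conversion on demand
        assert s.equals("doc-1") && b != null;
    }
}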
*/ +package org.elasticsearch.plugins; -package org.elasticsearch.cluster.settings; +public class DummyPluginInfo extends PluginInfo { -import org.elasticsearch.common.inject.BindingAnnotation; + private DummyPluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) { + super(name, description, site, version, jvm, classname, isolated); + } -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.ElementType.FIELD; -import static java.lang.annotation.ElementType.PARAMETER; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - - -@BindingAnnotation -@Target({FIELD, PARAMETER}) -@Retention(RUNTIME) -@Documented -public @interface ClusterDynamicSettings { -} \ No newline at end of file + public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", true, "dummy_plugin_version", true, "DummyPluginName", true); +} diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 1db0ac966d7..1efc151836d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -23,12 +23,9 @@ import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.index.IndexService; -import java.io.Closeable; import java.util.Collection; import java.util.Collections; -import java.util.List; /** * An extension point allowing to plug in custom functionality. 
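With the dead imports gone, the extension point itself is untouched: a plugin still supplies a name and a description and overrides only the hooks it needs. A hypothetical bare-bones subclass (class name and strings invented):

import org.elasticsearch.plugins.Plugin;

// Smallest possible plugin against this extension point.
public class ExamplePlugin extends Plugin {
    @Override
    public String name() {
        return "example-plugin";
    }

    @Override
    public String description() {
        return "demonstrates the minimal Plugin surface";
    }
}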
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index 1ebe7813d3c..7cd50409fb6 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -20,7 +20,11 @@ package org.elasticsearch.plugins; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.*; +import org.elasticsearch.Build; +import org.elasticsearch.ElasticsearchCorruptionException; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; @@ -36,9 +40,23 @@ import java.io.IOException; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URL; -import java.nio.file.*; -import java.nio.file.attribute.*; -import java.util.*; +import java.nio.file.DirectoryStream; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.GroupPrincipal; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFileAttributes; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.UserPrincipal; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; import java.util.stream.StreamSupport; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 5ebd43d5026..50938a1916c 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -46,7 +46,6 @@ import java.net.URL; import java.net.URLClassLoader; import java.nio.file.DirectoryStream; import java.nio.file.Files; -import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; diff --git a/core/src/main/java/org/elasticsearch/repositories/RepositoryMissingException.java b/core/src/main/java/org/elasticsearch/repositories/RepositoryMissingException.java index fe513619d06..8b4e0976e18 100644 --- a/core/src/main/java/org/elasticsearch/repositories/RepositoryMissingException.java +++ b/core/src/main/java/org/elasticsearch/repositories/RepositoryMissingException.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.rest.RestStatus; diff --git a/core/src/main/java/org/elasticsearch/repositories/RepositoryVerificationException.java b/core/src/main/java/org/elasticsearch/repositories/RepositoryVerificationException.java index 3f8429f1f15..2574ff3b092 100644 --- a/core/src/main/java/org/elasticsearch/repositories/RepositoryVerificationException.java +++ b/core/src/main/java/org/elasticsearch/repositories/RepositoryVerificationException.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.rest.RestStatus; diff --git a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index 1ead50d1584..91600488332 100644 --- a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.repositories; import com.carrotsearch.hppc.ObjectContainer; import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 2648a183362..e941492e7f0 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -65,8 +65,6 @@ import org.elasticsearch.snapshots.SnapshotShardFailure; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; -import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.Collections; diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index b15da26e3f6..6d8400f648c 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -33,10 +33,17 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.gateway.CorruptStateException; -import java.io.*; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.util.Locale; /** diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 478158282d6..33f9d4e7c30 100644 --- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -33,7 +33,6 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; import java.nio.file.Path; -import java.nio.file.Paths; /** * Shared file system implementation of the BlobStoreRepository diff --git a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 1ae1e575692..294338c0501 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -19,7 +19,11 @@ package org.elasticsearch.rest; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; import org.elasticsearch.common.ParseFieldMatcher; diff --git a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index fc944f49460..ba7e54c123f 100644 --- a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -21,10 +21,8 @@ package org.elasticsearch.rest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.bootstrap.Elasticsearch; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.ToXContent; diff --git a/core/src/main/java/org/elasticsearch/rest/RestFilter.java b/core/src/main/java/org/elasticsearch/rest/RestFilter.java index dd86c026b75..e3ff44ff1fc 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestFilter.java +++ b/core/src/main/java/org/elasticsearch/rest/RestFilter.java @@ -19,8 +19,6 @@ package org.elasticsearch.rest; -import org.elasticsearch.ElasticsearchException; - import java.io.Closeable; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java b/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java deleted file mode 100644 index f0e4d10d7c4..00000000000 --- a/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
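The whole hand-maintained RestActionModule below is deleted, so adding a REST endpoint no longer means appending another bind(...) line to a 270-line Guice module. Registration instead goes through a single hook; the sketch below assumes a NetworkModule-style registerRestHandler API, which is not itself shown in this diff:

import org.elasticsearch.common.network.NetworkModule; // path assumed
import org.elasticsearch.plugins.Plugin;

// Assumed plugin-side registration replacing the per-action bind(...).asEagerSingleton() calls.
public class ExampleRestPlugin extends Plugin {
    public void onModule(NetworkModule networkModule) {
        networkModule.registerRestHandler(ExampleRestAction.class); // one call per handler
    }
}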
- */ - -package org.elasticsearch.rest.action; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.multibindings.Multibinder; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthAction; -import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction; -import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction; -import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.put.RestPutRepositoryAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.verify.RestVerifyRepositoryAction; -import org.elasticsearch.rest.action.admin.cluster.reroute.RestClusterRerouteAction; -import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterGetSettingsAction; -import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterUpdateSettingsAction; -import org.elasticsearch.rest.action.admin.cluster.shards.RestClusterSearchShardsAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.create.RestCreateSnapshotAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.delete.RestDeleteSnapshotAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.get.RestGetSnapshotsAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.restore.RestRestoreSnapshotAction; -import org.elasticsearch.rest.action.admin.cluster.snapshots.status.RestSnapshotsStatusAction; -import org.elasticsearch.rest.action.admin.cluster.state.RestClusterStateAction; -import org.elasticsearch.rest.action.admin.cluster.stats.RestClusterStatsAction; -import org.elasticsearch.rest.action.admin.cluster.tasks.RestPendingClusterTasksAction; -import org.elasticsearch.rest.action.admin.indices.alias.RestIndicesAliasesAction; -import org.elasticsearch.rest.action.admin.indices.alias.delete.RestIndexDeleteAliasesAction; -import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetAliasesAction; -import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetIndicesAliasesAction; -import org.elasticsearch.rest.action.admin.indices.alias.head.RestAliasesExistAction; -import org.elasticsearch.rest.action.admin.indices.alias.put.RestIndexPutAliasAction; -import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction; -import org.elasticsearch.rest.action.admin.indices.cache.clear.RestClearIndicesCacheAction; -import org.elasticsearch.rest.action.admin.indices.close.RestCloseIndexAction; -import org.elasticsearch.rest.action.admin.indices.create.RestCreateIndexAction; -import org.elasticsearch.rest.action.admin.indices.delete.RestDeleteIndexAction; -import org.elasticsearch.rest.action.admin.indices.exists.indices.RestIndicesExistsAction; -import org.elasticsearch.rest.action.admin.indices.exists.types.RestTypesExistsAction; -import org.elasticsearch.rest.action.admin.indices.flush.RestFlushAction; -import org.elasticsearch.rest.action.admin.indices.flush.RestSyncedFlushAction; -import org.elasticsearch.rest.action.admin.indices.forcemerge.RestForceMergeAction; -import org.elasticsearch.rest.action.admin.indices.get.RestGetIndicesAction; -import 
org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetFieldMappingAction; -import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetMappingAction; -import org.elasticsearch.rest.action.admin.indices.mapping.put.RestPutMappingAction; -import org.elasticsearch.rest.action.admin.indices.open.RestOpenIndexAction; -import org.elasticsearch.rest.action.admin.indices.recovery.RestRecoveryAction; -import org.elasticsearch.rest.action.admin.indices.refresh.RestRefreshAction; -import org.elasticsearch.rest.action.admin.indices.segments.RestIndicesSegmentsAction; -import org.elasticsearch.rest.action.admin.indices.settings.RestGetSettingsAction; -import org.elasticsearch.rest.action.admin.indices.settings.RestUpdateSettingsAction; -import org.elasticsearch.rest.action.admin.indices.shards.RestIndicesShardStoresAction; -import org.elasticsearch.rest.action.admin.indices.stats.RestIndicesStatsAction; -import org.elasticsearch.rest.action.admin.indices.template.delete.RestDeleteIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.template.get.RestGetIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.template.head.RestHeadIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.template.put.RestPutIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.upgrade.RestUpgradeAction; -import org.elasticsearch.rest.action.admin.indices.validate.query.RestValidateQueryAction; -import org.elasticsearch.rest.action.admin.indices.validate.template.RestRenderSearchTemplateAction; -import org.elasticsearch.rest.action.admin.indices.warmer.delete.RestDeleteWarmerAction; -import org.elasticsearch.rest.action.admin.indices.warmer.get.RestGetWarmerAction; -import org.elasticsearch.rest.action.admin.indices.warmer.put.RestPutWarmerAction; -import org.elasticsearch.rest.action.bulk.RestBulkAction; -import org.elasticsearch.rest.action.cat.AbstractCatAction; -import org.elasticsearch.rest.action.cat.RestAliasAction; -import org.elasticsearch.rest.action.cat.RestAllocationAction; -import org.elasticsearch.rest.action.cat.RestCatAction; -import org.elasticsearch.rest.action.cat.RestFielddataAction; -import org.elasticsearch.rest.action.cat.RestHealthAction; -import org.elasticsearch.rest.action.cat.RestIndicesAction; -import org.elasticsearch.rest.action.cat.RestMasterAction; -import org.elasticsearch.rest.action.cat.RestNodeAttrsAction; -import org.elasticsearch.rest.action.cat.RestNodesAction; -import org.elasticsearch.rest.action.cat.RestPluginsAction; -import org.elasticsearch.rest.action.cat.RestRepositoriesAction; -import org.elasticsearch.rest.action.cat.RestSegmentsAction; -import org.elasticsearch.rest.action.cat.RestShardsAction; -import org.elasticsearch.rest.action.cat.RestSnapshotAction; -import org.elasticsearch.rest.action.cat.RestThreadPoolAction; -import org.elasticsearch.rest.action.delete.RestDeleteAction; -import org.elasticsearch.rest.action.explain.RestExplainAction; -import org.elasticsearch.rest.action.fieldstats.RestFieldStatsAction; -import org.elasticsearch.rest.action.get.RestGetAction; -import org.elasticsearch.rest.action.get.RestGetSourceAction; -import org.elasticsearch.rest.action.get.RestHeadAction; -import org.elasticsearch.rest.action.get.RestMultiGetAction; -import org.elasticsearch.rest.action.index.RestIndexAction; -import org.elasticsearch.rest.action.main.RestMainAction; -import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction; -import 
org.elasticsearch.rest.action.percolate.RestPercolateAction; -import org.elasticsearch.rest.action.script.RestDeleteIndexedScriptAction; -import org.elasticsearch.rest.action.script.RestGetIndexedScriptAction; -import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction; -import org.elasticsearch.rest.action.search.RestClearScrollAction; -import org.elasticsearch.rest.action.search.RestMultiSearchAction; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.rest.action.search.RestSearchScrollAction; -import org.elasticsearch.rest.action.suggest.RestSuggestAction; -import org.elasticsearch.rest.action.template.RestDeleteSearchTemplateAction; -import org.elasticsearch.rest.action.template.RestGetSearchTemplateAction; -import org.elasticsearch.rest.action.template.RestPutSearchTemplateAction; -import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction; -import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction; -import org.elasticsearch.rest.action.update.RestUpdateAction; - -import java.util.ArrayList; -import java.util.List; - -/** - * - */ -public class RestActionModule extends AbstractModule { - private List<Class<? extends BaseRestHandler>> restPluginsActions = new ArrayList<>(); - - public RestActionModule(List<Class<? extends BaseRestHandler>> restPluginsActions) { - this.restPluginsActions = restPluginsActions; - } - - @Override - protected void configure() { - for (Class<? extends BaseRestHandler> restAction : restPluginsActions) { - bind(restAction).asEagerSingleton(); - } - - bind(RestMainAction.class).asEagerSingleton(); - - bind(RestNodesInfoAction.class).asEagerSingleton(); - bind(RestNodesStatsAction.class).asEagerSingleton(); - bind(RestNodesHotThreadsAction.class).asEagerSingleton(); - bind(RestClusterStatsAction.class).asEagerSingleton(); - bind(RestClusterStateAction.class).asEagerSingleton(); - bind(RestClusterHealthAction.class).asEagerSingleton(); - bind(RestClusterUpdateSettingsAction.class).asEagerSingleton(); - bind(RestClusterGetSettingsAction.class).asEagerSingleton(); - bind(RestClusterRerouteAction.class).asEagerSingleton(); - bind(RestClusterSearchShardsAction.class).asEagerSingleton(); - bind(RestPendingClusterTasksAction.class).asEagerSingleton(); - bind(RestPutRepositoryAction.class).asEagerSingleton(); - bind(RestGetRepositoriesAction.class).asEagerSingleton(); - bind(RestDeleteRepositoryAction.class).asEagerSingleton(); - bind(RestVerifyRepositoryAction.class).asEagerSingleton(); - bind(RestGetSnapshotsAction.class).asEagerSingleton(); - bind(RestCreateSnapshotAction.class).asEagerSingleton(); - bind(RestRestoreSnapshotAction.class).asEagerSingleton(); - bind(RestDeleteSnapshotAction.class).asEagerSingleton(); - bind(RestSnapshotsStatusAction.class).asEagerSingleton(); - - bind(RestIndicesExistsAction.class).asEagerSingleton(); - bind(RestTypesExistsAction.class).asEagerSingleton(); - bind(RestGetIndicesAction.class).asEagerSingleton(); - bind(RestIndicesStatsAction.class).asEagerSingleton(); - bind(RestIndicesSegmentsAction.class).asEagerSingleton(); - bind(RestIndicesShardStoresAction.class).asEagerSingleton(); - bind(RestGetAliasesAction.class).asEagerSingleton(); - bind(RestAliasesExistAction.class).asEagerSingleton(); - bind(RestIndexDeleteAliasesAction.class).asEagerSingleton(); - bind(RestIndexPutAliasAction.class).asEagerSingleton(); - bind(RestIndicesAliasesAction.class).asEagerSingleton(); - bind(RestGetIndicesAliasesAction.class).asEagerSingleton(); - bind(RestCreateIndexAction.class).asEagerSingleton(); - bind(RestDeleteIndexAction.class).asEagerSingleton(); -
bind(RestCloseIndexAction.class).asEagerSingleton(); - bind(RestOpenIndexAction.class).asEagerSingleton(); - - bind(RestUpdateSettingsAction.class).asEagerSingleton(); - bind(RestGetSettingsAction.class).asEagerSingleton(); - - bind(RestAnalyzeAction.class).asEagerSingleton(); - bind(RestGetIndexTemplateAction.class).asEagerSingleton(); - bind(RestPutIndexTemplateAction.class).asEagerSingleton(); - bind(RestDeleteIndexTemplateAction.class).asEagerSingleton(); - bind(RestHeadIndexTemplateAction.class).asEagerSingleton(); - - bind(RestPutWarmerAction.class).asEagerSingleton(); - bind(RestDeleteWarmerAction.class).asEagerSingleton(); - bind(RestGetWarmerAction.class).asEagerSingleton(); - - bind(RestPutMappingAction.class).asEagerSingleton(); - bind(RestGetMappingAction.class).asEagerSingleton(); - bind(RestGetFieldMappingAction.class).asEagerSingleton(); - - bind(RestRefreshAction.class).asEagerSingleton(); - bind(RestFlushAction.class).asEagerSingleton(); - bind(RestSyncedFlushAction.class).asEagerSingleton(); - bind(RestForceMergeAction.class).asEagerSingleton(); - bind(RestUpgradeAction.class).asEagerSingleton(); - bind(RestClearIndicesCacheAction.class).asEagerSingleton(); - - bind(RestIndexAction.class).asEagerSingleton(); - bind(RestGetAction.class).asEagerSingleton(); - bind(RestGetSourceAction.class).asEagerSingleton(); - bind(RestHeadAction.class).asEagerSingleton(); - bind(RestMultiGetAction.class).asEagerSingleton(); - bind(RestDeleteAction.class).asEagerSingleton(); - bind(org.elasticsearch.rest.action.count.RestCountAction.class).asEagerSingleton(); - bind(RestSuggestAction.class).asEagerSingleton(); - bind(RestTermVectorsAction.class).asEagerSingleton(); - bind(RestMultiTermVectorsAction.class).asEagerSingleton(); - bind(RestBulkAction.class).asEagerSingleton(); - bind(RestUpdateAction.class).asEagerSingleton(); - bind(RestPercolateAction.class).asEagerSingleton(); - bind(RestMultiPercolateAction.class).asEagerSingleton(); - - bind(RestSearchAction.class).asEagerSingleton(); - bind(RestSearchScrollAction.class).asEagerSingleton(); - bind(RestClearScrollAction.class).asEagerSingleton(); - bind(RestMultiSearchAction.class).asEagerSingleton(); - bind(RestRenderSearchTemplateAction.class).asEagerSingleton(); - - bind(RestValidateQueryAction.class).asEagerSingleton(); - - bind(RestExplainAction.class).asEagerSingleton(); - - bind(RestRecoveryAction.class).asEagerSingleton(); - - // Templates API - bind(RestGetSearchTemplateAction.class).asEagerSingleton(); - bind(RestPutSearchTemplateAction.class).asEagerSingleton(); - bind(RestDeleteSearchTemplateAction.class).asEagerSingleton(); - - // Scripts API - bind(RestGetIndexedScriptAction.class).asEagerSingleton(); - bind(RestPutIndexedScriptAction.class).asEagerSingleton(); - bind(RestDeleteIndexedScriptAction.class).asEagerSingleton(); - - - bind(RestFieldStatsAction.class).asEagerSingleton(); - - // cat API - Multibinder catActionMultibinder = Multibinder.newSetBinder(binder(), AbstractCatAction.class); - catActionMultibinder.addBinding().to(RestAllocationAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestShardsAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestMasterAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestNodesAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestIndicesAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestSegmentsAction.class).asEagerSingleton(); - // Fully qualified to prevent 
interference with rest.action.count.RestCountAction - catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestCountAction.class).asEagerSingleton(); - // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction - catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestRecoveryAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestHealthAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestAliasAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestThreadPoolAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestPluginsAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestFielddataAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestNodeAttrsAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestRepositoriesAction.class).asEagerSingleton(); - catActionMultibinder.addBinding().to(RestSnapshotAction.class).asEagerSingleton(); - // no abstract cat action - bind(RestCatAction.class).asEagerSingleton(); - } -} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java index 98d4feca189..badf6f6de58 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java @@ -21,12 +21,15 @@ package org.elasticsearch.rest.action.admin.cluster.health; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import java.util.Locale; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java index c1a39cc1432..24c4c449410 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java @@ -27,7 +27,13 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import 
org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestResponseListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java index 910d3dcc833..786891d330d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java new file mode 100644 index 00000000000..813c7822428 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
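The new RestListTasksAction, whose body follows, maps GET /_tasks, /_tasks/{nodeId} and /_tasks/{nodeId}/{actions} onto the cluster-admin listTasks call. Roughly equivalent client-side usage, assuming the nodes-ids constructor is varargs (as its use below suggests) and the conventional ListTasksResponse type; the filters are illustrative:

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; // assumed response type
import org.elasticsearch.client.Client;

// Sketch of the call the handler issues for each HTTP request.
void listTasks(Client client) {
    ListTasksRequest listTasksRequest = new ListTasksRequest("node-1", "node-2"); // empty = all nodes
    listTasksRequest.detailed(true);       // mirrors the ?detailed request parameter
    listTasksRequest.actions("cluster:*"); // mirrors the {actions} path parameter
    client.admin().cluster().listTasks(listTasksRequest, new ActionListener<ListTasksResponse>() {
        @Override
        public void onResponse(ListTasksResponse response) {
            // inspect the currently running tasks here
        }

        @Override
        public void onFailure(Throwable e) {
            // handle the failure here
        }
    });
}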
+ */ + +package org.elasticsearch.rest.action.admin.cluster.node.tasks; + +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.support.RestToXContentListener; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + + +public class RestListTasksAction extends BaseRestHandler { + + @Inject + public RestListTasksAction(Settings settings, RestController controller, Client client) { + super(settings, controller, client); + controller.registerHandler(GET, "/_tasks", this); + controller.registerHandler(GET, "/_tasks/{nodeId}", this); + controller.registerHandler(GET, "/_tasks/{nodeId}/{actions}", this); + } + + @Override + public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { + boolean detailed = request.paramAsBoolean("detailed", false); + String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); + String[] actions = Strings.splitStringByCommaToArray(request.param("actions")); + String parentNode = request.param("parent_node"); + long parentTaskId = request.paramAsLong("parent_task", ListTasksRequest.ALL_TASKS); + + ListTasksRequest listTasksRequest = new ListTasksRequest(nodesIds); + listTasksRequest.detailed(detailed); + listTasksRequest.actions(actions); + listTasksRequest.parentNode(parentNode); + listTasksRequest.parentTaskId(parentTaskId); + client.admin().cluster().listTasks(listTasksRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java index 758ee34505a..36e02ba4599 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteReposito import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.client.Requests.deleteRepositoryRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java index 9f09081417a..fd347ccd332 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java @@ -29,7 +29,12 @@ import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.client.Requests.getRepositoryRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java index b974a9be0fb..feeeeb77aba 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResp import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.client.Requests.putRepositoryRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java index 6e3a889f691..c0c7ad5b953 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java @@ -23,7 +23,10 @@ import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyReposito import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.verifyRepositoryRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java index 7d5d2c9d5ff..387728918a6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java @@ -30,7 +30,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java index a1cfdb48ddb..fc4432a658f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java @@ -23,19 +23,33 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; +import java.io.IOException; + /** */ public class RestClusterGetSettingsAction extends BaseRestHandler { + private final ClusterSettings clusterSettings; + @Inject - public RestClusterGetSettingsAction(Settings settings, RestController controller, Client client) { + public RestClusterGetSettingsAction(Settings settings, RestController controller, Client client, ClusterSettings clusterSettings) { super(settings, controller, client); + this.clusterSettings = clusterSettings; controller.registerHandler(RestRequest.Method.GET, "/_cluster/settings", this); } @@ -44,24 +58,34 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest() .routingTable(false) .nodes(false); + final boolean renderDefaults = request.paramAsBoolean("defaults", false); clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local())); client.admin().cluster().state(clusterStateRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(ClusterStateResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - - builder.startObject("persistent"); - response.getState().metaData().persistentSettings().toXContent(builder, request); - builder.endObject(); - - builder.startObject("transient"); - response.getState().metaData().transientSettings().toXContent(builder, request); - builder.endObject(); - - builder.endObject(); - - return new BytesRestResponse(RestStatus.OK, builder); + return new BytesRestResponse(RestStatus.OK, renderResponse(response.getState(), renderDefaults, builder, request)); } }); } + + private XContentBuilder renderResponse(ClusterState 
state, boolean renderDefaults, XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + + builder.startObject("persistent"); + state.metaData().persistentSettings().toXContent(builder, params); + builder.endObject(); + + builder.startObject("transient"); + state.metaData().transientSettings().toXContent(builder, params); + builder.endObject(); + + if (renderDefaults) { + builder.startObject("defaults"); + clusterSettings.diff(state.metaData().settings(), this.settings).toXContent(builder, params); + builder.endObject(); + } + + builder.endObject(); + return builder; + } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java index b30f6848f30..aa84606b076 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java @@ -28,7 +28,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java index a797a474eb6..ee68c1bbb7a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java index 4bdef344d3f..bf9dd4a0119 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRes import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import 
org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.createSnapshotRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java index 7e88817ff7f..66b5a4188c0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRes import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.client.Requests.deleteSnapshotRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java index 21aadd174ec..123798cf995 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.getSnapshotsRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java index 4dc2eed7794..028285d3064 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotR import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.restoreSnapshotRequest; diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java index f22300432a5..b60a740a15d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.snapshotsStatusRequest; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java index 7f72326bf0a..f28ecfe4888 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java @@ -31,7 +31,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.EnumSet; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java index 975c460dda8..b14293ba310 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java index 0277a084619..5d9eac430b5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java +++ 
b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java index 5648abc7f14..f62d6febee5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java @@ -28,7 +28,10 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java index b9102a0048f..6748cc2509d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java index 4a1b0bf938b..aa62ee471dc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java @@ -31,7 +31,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import 
org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java index 528799cba28..4c774b58645 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java @@ -32,7 +32,12 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.common.Strings.isAllOrWildcard; @@ -89,4 +94,4 @@ public class RestGetIndicesAliasesAction extends BaseRestHandler { }); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java index 93342fbf0ba..fce40123b68 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java @@ -28,7 +28,11 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java index 42ea9cba99c..cc06a14b8de 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java @@ -29,7 +29,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import 
org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java index 940b6c1cefa..091fbc1680d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java index 2d54e360f41..41a272cc8b7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java @@ -24,7 +24,10 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java index 43201592e31..0851fb867b7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java index 8ea4e633bc1..6843f5c5ce2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java +++ 
b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java @@ -26,7 +26,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java index a03a7f0fe1d..f1f227edfdd 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java @@ -25,7 +25,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java index 97a951d2ad2..47c0451adfc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java index 9a3f844abb1..4fe893bd411 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java @@ -19,15 +19,20 @@ package org.elasticsearch.rest.action.admin.indices.flush; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import 
org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.indices.flush.IndicesSyncedFlushResult; -import org.elasticsearch.indices.flush.SyncedFlushService; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -38,12 +43,9 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestSyncedFlushAction extends BaseRestHandler { - private final SyncedFlushService syncedFlushService; - @Inject - public RestSyncedFlushAction(Settings settings, RestController controller, Client client, SyncedFlushService syncedFlushService) { + public RestSyncedFlushAction(Settings settings, RestController controller, Client client) { super(settings, controller, client); - this.syncedFlushService = syncedFlushService; controller.registerHandler(POST, "/_flush/synced", this); controller.registerHandler(POST, "/{index}/_flush/synced", this); @@ -53,12 +55,12 @@ public class RestSyncedFlushAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { - String[] indices = Strings.splitStringByCommaToArray(request.param("index")); IndicesOptions indicesOptions = IndicesOptions.fromRequest(request, IndicesOptions.lenientExpandOpen()); - - syncedFlushService.attemptSyncedFlush(indices, indicesOptions, new RestBuilderListener<IndicesSyncedFlushResult>(channel) { + SyncedFlushRequest syncedFlushRequest = new SyncedFlushRequest(Strings.splitStringByCommaToArray(request.param("index"))); + syncedFlushRequest.indicesOptions(indicesOptions); + client.admin().indices().syncedFlush(syncedFlushRequest, new RestBuilderListener<SyncedFlushResponse>(channel) { @Override - public RestResponse buildResponse(IndicesSyncedFlushResult results, XContentBuilder builder) throws Exception { + public RestResponse buildResponse(SyncedFlushResponse results, XContentBuilder builder) throws Exception { builder.startObject(); results.toXContent(builder, request); builder.endObject();
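The RestSyncedFlushAction hunk above is the main behavioural change visible in this stretch of the patch: the handler stops calling the injected SyncedFlushService directly and instead goes through the client, so the REST layer exercises the same transport action as any other caller. A hedged sketch of issuing a synced flush the same way outside a REST handler; the index name and listener bodies are illustrative, and the onFailure(Throwable) signature assumes the ActionListener of this era of the codebase:

[source,java]
---------------------------------------------------------------------------
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;

public class SyncedFlushSketch {
    static void syncedFlush(Client client) {
        // "my-index" is illustrative; the handler builds this array from the REST path
        SyncedFlushRequest request = new SyncedFlushRequest("my-index");
        request.indicesOptions(IndicesOptions.lenientExpandOpen());
        client.admin().indices().syncedFlush(request, new ActionListener<SyncedFlushResponse>() {
            @Override
            public void onResponse(SyncedFlushResponse response) {
                // the handler renders per-shard results via response.toXContent(...)
            }

            @Override
            public void onFailure(Throwable e) {
                // error handling here is illustrative
            }
        });
    }
}
---------------------------------------------------------------------------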
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java index 730276c1a2b..d8ef7bace3a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java @@ -27,10 +27,14 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; -import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestStatus.OK; import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastShardsHeader; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java index da2130adeaf..86336ccf971 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java @@ -19,14 +19,12 @@ package org.elasticsearch.rest.action.admin.indices.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.AliasMetaData; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java index 3cfb6f6da64..48fa60cb4b0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java @@ -34,7 +34,12 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.TypeMissingException; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java index 4189d490310..3ceecbfd3a9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import
org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.client.Requests.putMappingRequest; @@ -51,7 +54,7 @@ public class RestPutMappingAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/_mapping", this); controller.registerHandler(POST, "/{index}/_mapping/{type}", this); controller.registerHandler(POST, "/_mapping/{type}", this); - + //register the same paths, but with plural form _mappings controller.registerHandler(PUT, "/{index}/_mappings/", this); controller.registerHandler(PUT, "/{index}/{type}/_mappings", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java index e81bca30f6a..cb22f81ba46 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java index 9d470c4b051..e46831e81e8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java index 85775d55394..e552b13316a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; 
+import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java index 7356d1b759f..a233c75da58 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java index a413b145c73..f27897aa731 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java @@ -29,7 +29,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java index 005b30e6207..1a8ba58306d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.indices.settings; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; @@ -88,6 +87,6 @@ public class RestUpdateSettingsAction extends BaseRestHandler { } updateSettingsRequest.settings(updateSettings); - client.admin().indices().updateSettings(updateSettingsRequest, new 
AcknowledgedRestListener<UpdateSettingsResponse>(channel)); + client.admin().indices().updateSettings(updateSettingsRequest, new AcknowledgedRestListener<>(channel)); } }
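A small cleanup rides along in the RestUpdateSettingsAction hunk above: switching the explicit type argument to the diamond lets the compiler infer AcknowledgedRestListener<UpdateSettingsResponse> from the updateSettings signature, which is what allows the UpdateSettingsResponse import to be deleted. A hedged sketch of the equivalence; the request construction is illustrative:

[source,java]
---------------------------------------------------------------------------
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.action.support.AcknowledgedRestListener;

public class DiamondListenerSketch {
    static void updateSettings(Client client, RestChannel channel) {
        UpdateSettingsRequest request = new UpdateSettingsRequest("my-index"); // illustrative index
        // <> infers AcknowledgedRestListener<UpdateSettingsResponse> from the
        // updateSettings parameter type; spelling the argument out is equivalent
        client.admin().indices().updateSettings(request, new AcknowledgedRestListener<>(channel));
    }
}
---------------------------------------------------------------------------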
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java index a776efb63a7..e2dc64cc475 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java @@ -20,15 +20,20 @@ package org.elasticsearch.rest.action.admin.indices.shards; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; -import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java index 4431ba5f4b3..891afd6b8cf 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java index 2b6ebbc6023..a4c1869609b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java @@ -23,7 +23,10 @@ import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplat import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java index a4f8805539d..0838fa887e6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java @@ -23,7 +23,12 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResp import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java index e555cfd0fac..45f8a674dde 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java @@ -23,7 +23,10 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRespo import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java index 0a4592f387f..81bdaf7536b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java @@ -32,7 +32,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import
org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/delete/RestDeleteWarmerAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/delete/RestDeleteWarmerAction.java index 1d3fae87616..4fe07564031 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/delete/RestDeleteWarmerAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/delete/RestDeleteWarmerAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.AcknowledgedRestListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/get/RestGetWarmerAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/get/RestGetWarmerAction.java index 67e01017678..26f1186f550 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/get/RestGetWarmerAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/warmer/get/RestGetWarmerAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.warmer.IndexWarmersMetaData; diff --git a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java index 536b73ba2b5..37ce03bac70 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java @@ -31,7 +31,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.POST; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java 
b/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java index 95873b8878c..895211a0979 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java @@ -23,7 +23,12 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.io.UTF8StreamWriter; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import static org.elasticsearch.rest.action.support.RestTable.buildHelpWidths; import static org.elasticsearch.rest.action.support.RestTable.pad; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java index 3d2b283714f..b322fef9f30 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java index 938743bf3fa..7acc0763e85 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.cat; import com.carrotsearch.hppc.ObjectIntScatterMap; - import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -35,7 +34,10 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java index cebcf3159ab..337684769f3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java @@ -22,7 +22,12 @@ 
package org.elasticsearch.rest.action.cat; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java index a881f72104b..486e04cc343 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java @@ -19,8 +19,8 @@ package org.elasticsearch.rest.action.cat; -import com.carrotsearch.hppc.ObjectLongMap; import com.carrotsearch.hppc.ObjectLongHashMap; +import com.carrotsearch.hppc.ObjectLongMap; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -29,7 +29,10 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 4dd8033c0bd..aec087523b8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.cat; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; @@ -31,6 +30,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; @@ -38,7 +38,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import 
org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java index 7c2f4ef61a3..f43c8c019d2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java index 2ac08fd74d4..d67d6bc2d28 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.rest.action.cat; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 208b7e6c374..e86132a909e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -53,7 +53,10 @@ import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.monitor.os.OsStats; import org.elasticsearch.monitor.process.ProcessStats; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java index 136997c5c89..b563450f8f6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; 
+import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java index b52f8e6fc10..34e05223657 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.cat; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; @@ -31,7 +30,11 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.plugins.PluginInfo; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 6574a01ae28..692d5bebbc9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -36,7 +36,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; diff --git a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java index 1ce78e33e3f..834b3d391b1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java @@ -30,7 +30,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import 
org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.builder.SearchSourceBuilder; diff --git a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java index e583ed36274..4336c9db2d4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java @@ -21,15 +21,15 @@ package org.elasticsearch.rest.action.delete; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; diff --git a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java index 086446fc53f..0e472bb0bf3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java @@ -32,7 +32,12 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.fetch.source.FetchSourceContext; diff --git a/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java index 647728b24ab..c314c4325d6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java @@ -28,7 +28,13 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import 
org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; @@ -89,4 +95,4 @@ public class RestFieldStatsAction extends BaseRestHandler { } }); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java index 9ed5c4d5fe9..e85eef48574 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.fetch.source.FetchSourceContext; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java index c0e45fc6aeb..ff6c04a6d12 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java @@ -26,7 +26,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.search.fetch.source.FetchSourceContext; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java index d0c1433bb47..f32c07f20f1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java @@ -25,7 +25,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import 
org.elasticsearch.rest.action.support.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java index 14e4496085b..440312b7cb9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; import org.elasticsearch.search.fetch.source.FetchSourceContext; diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index 310ce0a1248..13a93299187 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -21,22 +21,24 @@ package org.elasticsearch.rest.action.index; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; -import static org.elasticsearch.rest.RestStatus.*; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; /** * diff --git a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java index df99979eb6b..42de9b898ae 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java @@ -27,7 +27,12 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import 
org.elasticsearch.rest.RestStatus; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java index d9a1d9f4cb4..879ec78d754 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java @@ -25,7 +25,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java index 4ee543f5362..052fa42104b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java @@ -27,7 +27,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java index 4d8d678a304..b492e7c513f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java @@ -26,7 +26,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; @@ -82,4 +88,4 @@ public class RestDeleteIndexedScriptAction extends BaseRestHandler { static final XContentBuilderString _ID = new XContentBuilderString("_id"); static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); } -} \ No newline at end of file +} diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java index e45e7cdb625..a4c6784d415 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java @@ -26,7 +26,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import static org.elasticsearch.rest.RestRequest.Method.GET; diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java index 33145be6a47..ed440c2b9fa 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java @@ -27,14 +27,22 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; -import static org.elasticsearch.rest.RestStatus.*; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; +import static org.elasticsearch.rest.RestStatus.CREATED; +import static org.elasticsearch.rest.RestStatus.OK; /** * diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java index 5efd2584b38..b2a2905585c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java @@ -29,7 +29,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import 
org.elasticsearch.rest.action.support.RestStatusToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java index 6fb72931804..eb7e0465902 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java @@ -30,7 +30,10 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import org.elasticsearch.search.Scroll; diff --git a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java index 8bf360dc36c..2841bbe1fe3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java @@ -19,9 +19,6 @@ package org.elasticsearch.rest.action.suggest; -import static org.elasticsearch.rest.RestRequest.Method.GET; -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastShardsHeader; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -41,6 +38,10 @@ import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.suggest.Suggest; +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastShardsHeader; + /** * */ diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/AcknowledgedRestListener.java b/core/src/main/java/org/elasticsearch/rest/action/support/AcknowledgedRestListener.java index f1c15c69ba9..a59dc3d47b8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/AcknowledgedRestListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/AcknowledgedRestListener.java @@ -19,14 +19,11 @@ package org.elasticsearch.rest.action.support; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.action.support.RestActionListener; -import org.elasticsearch.rest.action.support.RestBuilderListener; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActionListener.java 
b/core/src/main/java/org/elasticsearch/rest/action/support/RestActionListener.java index 211635ef98f..9ebdc7b5d07 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActionListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActionListener.java @@ -25,8 +25,6 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; -import java.io.IOException; - /** * An action listener that requires {@link #processResponse(Object)} to be implemented * and will automatically handle failures. diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index 14935f5f9a5..9c0bb61d748 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -27,8 +27,17 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.xcontent.*; -import org.elasticsearch.index.query.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestResponseListener.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestResponseListener.java index acdfaab737b..b5c9f2bcca8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestResponseListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestResponseListener.java @@ -19,15 +19,9 @@ package org.elasticsearch.rest.action.support; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; -import java.io.IOException; - /** * A REST enabled action listener that has a basic onFailure implementation, and requires * sub classes to only implement {@link #buildResponse(Object)}. 
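Note for readers (illustration, not part of the patch): the listener cleaned up above is the base class REST handlers extend so that failure handling is inherited and only the success path needs writing. A minimal sketch in the explicit-import style this PR enforces; ExampleListener and ExampleResponse are hypothetical names, the rest are the real classes touched here:

import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.support.RestResponseListener;

public class ExampleListener extends RestResponseListener<ExampleResponse> {
    public ExampleListener(RestChannel channel) {
        super(channel);
    }

    @Override
    public RestResponse buildResponse(ExampleResponse response) throws Exception {
        // failures are handled by the base class; success only builds the HTTP response
        return new BytesRestResponse(RestStatus.OK, response.toString());
    }
}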
diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java index 3808e58a527..13b638910ae 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java @@ -30,10 +30,17 @@ import org.elasticsearch.common.unit.SizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; /** */ diff --git a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java index 0d23645afda..1523d299f03 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java @@ -21,7 +21,10 @@ package org.elasticsearch.rest.action.template; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction; import org.elasticsearch.script.Template; diff --git a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java index 400869fff0e..fe897f9b09f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java @@ -26,7 +26,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; diff --git a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java index f59c329fbc3..24264ca292e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java @@ -22,22 +22,16 @@ package org.elasticsearch.rest.action.update; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; 
-import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; diff --git a/core/src/main/java/org/elasticsearch/script/AbstractSearchScript.java b/core/src/main/java/org/elasticsearch/script/AbstractSearchScript.java index 658131202a0..98e5ab11fb7 100644 --- a/core/src/main/java/org/elasticsearch/script/AbstractSearchScript.java +++ b/core/src/main/java/org/elasticsearch/script/AbstractSearchScript.java @@ -21,7 +21,11 @@ package org.elasticsearch.script; import org.apache.lucene.search.Scorer; import org.elasticsearch.index.fielddata.ScriptDocValues; -import org.elasticsearch.search.lookup.*; +import org.elasticsearch.search.lookup.LeafDocLookup; +import org.elasticsearch.search.lookup.LeafFieldsLookup; +import org.elasticsearch.search.lookup.LeafIndexLookup; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.util.Map; @@ -82,7 +86,7 @@ public abstract class AbstractSearchScript extends AbstractExecutableScript impl protected final SourceLookup source() { return lookup.source(); } - + /** * Allows to access statistics on terms and fields. 
     */
diff --git a/core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java b/core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java
index 073a4eb5fa8..19bc4478884 100644
--- a/core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java
+++ b/core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java
@@ -62,7 +62,7 @@ public class NativeScriptEngineService extends AbstractComponent implements Scri
     }

     @Override
-    public Object compile(String script) {
+    public Object compile(String script, Map<String, String> params) {
         NativeScriptFactory scriptFactory = scripts.get(script);
         if (scriptFactory != null) {
             return scriptFactory;
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java b/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java
index 993c95ad797..41befc9406f 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java
@@ -36,7 +36,7 @@ public interface ScriptEngineService extends Closeable {

     boolean sandboxed();

-    Object compile(String script);
+    Object compile(String script, Map<String, String> params);

     ExecutableScript executable(CompiledScript compiledScript, @Nullable Map<String, Object> vars);

diff --git a/core/src/main/java/org/elasticsearch/script/ScriptParameterParser.java b/core/src/main/java/org/elasticsearch/script/ScriptParameterParser.java
index b89e8603227..66af94a07b8 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptParameterParser.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptParameterParser.java
@@ -27,7 +27,12 @@ import org.elasticsearch.script.Script.ScriptParseException;
 import org.elasticsearch.script.ScriptService.ScriptType;

 import java.io.IOException;
-import java.util.*;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;

 public class ScriptParameterParser {

@@ -176,7 +181,7 @@ public class ScriptParameterParser {
             if (value != null) {
                 String coreParameterName = parameter.getPreferredName();
                 putParameterValue(coreParameterName, value, ScriptType.INLINE);
-                
+
             }
         }
         for (ParseField parameter : fileParameters) {
@@ -184,7 +189,7 @@
             if (value != null) {
                 String coreParameterName = parameter.getPreferredName().replace(FILE_SUFFIX, "");
                 putParameterValue(coreParameterName, value, ScriptType.FILE);
-                
+
             }
         }
         for (ParseField parameter : indexedParameters) {
@@ -192,7 +197,7 @@
             if (value != null) {
                 String coreParameterName = parameter.getPreferredName().replace(INDEXED_SUFFIX, "");
                 putParameterValue(coreParameterName, value, ScriptType.INDEXED);
-                
+
             }
         }
     }
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java
index 3b91f2d3110..c9e9f9a873d 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptService.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java
@@ -67,6 +67,7 @@ import java.io.InputStreamReader;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;
@@ -96,9 +97,9 @@ public class ScriptService extends AbstractComponent implements Closeable {

     private final Map<String, ScriptEngineService> scriptEnginesByLang;
     private final Map<String, ScriptEngineService> scriptEnginesByExt;

-    private final ConcurrentMap<String, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();
+    private final ConcurrentMap<CacheKey, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();

-    private final Cache<String, CompiledScript> cache;
+    private final Cache<CacheKey, CompiledScript> cache;
     private final Path scriptsDirectory;

     private final ScriptModes scriptModes;
@@ -153,7 +154,7 @@
         this.defaultLang = settings.get(DEFAULT_SCRIPTING_LANGUAGE_SETTING, DEFAULT_LANG);

-        CacheBuilder<String, CompiledScript> cacheBuilder = CacheBuilder.builder();
+        CacheBuilder<CacheKey, CompiledScript> cacheBuilder = CacheBuilder.builder();
         if (cacheMaxSize >= 0) {
             cacheBuilder.setMaximumWeight(cacheMaxSize);
         }
@@ -224,7 +225,7 @@
     /**
     * Checks if a script can be executed and compiles it if needed, or returns the previously compiled and cached script.
     */
-    public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext) {
+    public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map<String, String> params) {
         if (script == null) {
             throw new IllegalArgumentException("The parameter script (Script) must not be null.");
         }
@@ -252,14 +253,14 @@
                     " operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported");
         }

-        return compileInternal(script, headersContext);
+        return compileInternal(script, headersContext, params);
     }

     /**
     * Compiles a script straight-away, or returns the previously compiled and cached script,
     * without checking if it can be executed based on settings.
     */
-    public CompiledScript compileInternal(Script script, HasContextAndHeaders context) {
+    public CompiledScript compileInternal(Script script, HasContextAndHeaders context, Map<String, String> params) {
         if (script == null) {
             throw new IllegalArgumentException("The parameter script (Script) must not be null.");
         }
@@ -277,7 +278,7 @@
         ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(lang);

         if (type == ScriptType.FILE) {
-            String cacheKey = getCacheKey(scriptEngineService, name, null);
+            CacheKey cacheKey = new CacheKey(scriptEngineService, name, null, params);
             //On disk scripts will be loaded into the staticCache by the listener
             CompiledScript compiledScript = staticCache.get(cacheKey);

@@ -299,14 +300,14 @@
                 code = getScriptFromIndex(indexedScript.lang, indexedScript.id, context);
             }

-            String cacheKey = getCacheKey(scriptEngineService, type == ScriptType.INLINE ? null : name, code);
+            CacheKey cacheKey = new CacheKey(scriptEngineService, type == ScriptType.INLINE ? null : name, code, params);
             CompiledScript compiledScript = cache.get(cacheKey);

             if (compiledScript == null) {
                 //Either an un-cached inline script or indexed script
                 //If the script type is inline the name will be the same as the code for identification in exceptions
                 try {
-                    compiledScript = new CompiledScript(type, name, lang, scriptEngineService.compile(code));
+                    compiledScript = new CompiledScript(type, name, lang, scriptEngineService.compile(code, params));
                 } catch (Exception exception) {
                     throw new ScriptException("Failed to compile " + type + " script [" + name + "] using lang [" + lang + "]", exception);
                 }
@@ -364,7 +365,7 @@
                 //we don't know yet what the script will be used for, but if all of the operations for this lang with
                 //indexed scripts are disabled, it makes no sense to even compile it.
                 if (isAnyScriptContextEnabled(scriptLang, scriptEngineService, ScriptType.INDEXED)) {
-                    Object compiled = scriptEngineService.compile(template.getScript());
+                    Object compiled = scriptEngineService.compile(template.getScript(), Collections.emptyMap());
                     if (compiled == null) {
                         throw new IllegalArgumentException("Unable to parse [" + template.getScript() +
                                 "] lang [" + scriptLang + "] (ScriptService.compile returned null)");
@@ -436,8 +437,8 @@
     /**
     * Compiles (or retrieves from cache) and executes the provided script
     */
-    public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext) {
-        return executable(compile(script, scriptContext, headersContext), script.getParams());
+    public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map<String, String> params) {
+        return executable(compile(script, scriptContext, headersContext, params), script.getParams());
     }

     /**
@@ -450,8 +451,8 @@
     /**
     * Compiles (or retrieves from cache) and executes the provided search script
     */
-    public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext) {
-        CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current());
+    public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext, Map<String, String> params) {
+        CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current(), params);
         return getScriptEngineServiceForLang(compiledScript.lang()).search(compiledScript, lookup, script.getParams());
     }

@@ -491,9 +492,9 @@
     * {@code ScriptEngineService}'s {@code scriptRemoved} method when the
     * script has been removed from the cache
     */
-    private class ScriptCacheRemovalListener implements RemovalListener<String, CompiledScript> {
+    private class ScriptCacheRemovalListener implements RemovalListener<CacheKey, CompiledScript> {
         @Override
-        public void onRemoval(RemovalNotification<String, CompiledScript> notification) {
+        public void onRemoval(RemovalNotification<CacheKey, CompiledScript> notification) {
             scriptMetrics.onCacheEviction();
             for (ScriptEngineService service : scriptEngines) {
                 try {
@@ -539,8 +540,8 @@
                     logger.info("compiling script file [{}]", file.toAbsolutePath());
                     try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) {
                         String script = Streams.copyToString(reader);
-                        String cacheKey = getCacheKey(engineService, scriptNameExt.v1(), null);
-                        staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script)));
+                        CacheKey cacheKey = new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap());
+                        staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script, Collections.emptyMap())));
                         scriptMetrics.onCompilation();
                     }
                 } else {
@@ -565,7 +566,7 @@
                 ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
                 assert engineService != null;
                 logger.info("removing script file [{}]", file.toAbsolutePath());
-                staticCache.remove(getCacheKey(engineService, scriptNameExt.v1(), null));
+                staticCache.remove(new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap()));
             }
         }
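Note (illustration, not part of the patch): the params argument threaded through compile() above also becomes part of the cache identity, via the CacheKey class introduced in the next hunk. Two compilations of identical source with different params may yield different compiled artifacts, so keying on lang, name, and code alone would let them alias each other. A small sketch, written as if CacheKey were visible outside ScriptService; engineService stands for any registered ScriptEngineService:

    Map<String, String> fast = Collections.singletonMap("opt_level", "fast");
    CacheKey withParams = new CacheKey(engineService, "my_script", "doc['f'].value * 2", fast);
    CacheKey noParams = new CacheKey(engineService, "my_script", "doc['f'].value * 2", Collections.emptyMap());
    assert !withParams.equals(noParams); // same lang/name/code, but params differ: separate cache entries
    assert withParams.equals(new CacheKey(engineService, "my_script", "doc['f'].value * 2", fast)); // equal params: cache hit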
@@ -625,11 +626,44 @@
         }
     }

-    private static String getCacheKey(ScriptEngineService scriptEngineService, String name, String code) {
-        String lang = scriptEngineService.types()[0];
-        return lang + ":" + (name != null ? ":" + name : "") + (code != null ? ":" + code : "");
+    private static final class CacheKey {
+        final String lang;
+        final String name;
+        final String code;
+        final Map<String, String> params;
+
+        private CacheKey(final ScriptEngineService service, final String name, final String code, final Map<String, String> params) {
+            this.lang = service.types()[0];
+            this.name = name;
+            this.code = code;
+            this.params = params;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+
+            CacheKey cacheKey = (CacheKey)o;
+
+            if (!lang.equals(cacheKey.lang)) return false;
+            if (name != null ? !name.equals(cacheKey.name) : cacheKey.name != null) return false;
+            if (code != null ? !code.equals(cacheKey.code) : cacheKey.code != null) return false;
+            return params.equals(cacheKey.params);
+
+        }
+
+        @Override
+        public int hashCode() {
+            int result = lang.hashCode();
+            result = 31 * result + (name != null ? name.hashCode() : 0);
+            result = 31 * result + (code != null ?
code.hashCode() : 0); + result = 31 * result + params.hashCode(); + return result; + } } + private static class IndexedScript { private final String lang; private final String id; diff --git a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java index 8096213b8fa..93659cf8856 100644 --- a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -20,7 +20,12 @@ package org.elasticsearch.search; -import org.apache.lucene.index.*; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index b84a5804c05..6f16d13bd92 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -21,7 +21,6 @@ package org.elasticsearch.search; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; import org.elasticsearch.search.action.SearchServiceTransportAction; @@ -65,8 +64,11 @@ import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams; -import org.elasticsearch.search.aggregations.bucket.terms.*; +import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; +import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsParser; +import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; import org.elasticsearch.search.aggregations.metrics.avg.AvgParser; import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser; @@ -127,7 +129,6 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelParserMapper; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelStreams; import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffParser; import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregator; import 
org.elasticsearch.search.controller.SearchPhaseController; @@ -385,7 +386,7 @@ public class SearchModule extends AbstractModule { SumBucketPipelineAggregator.registerStreams(); StatsBucketPipelineAggregator.registerStreams(); ExtendedStatsBucketPipelineAggregator.registerStreams(); - PercentilesBucketPipelineAggregator.registerStreams(); + PercentilesBucketPipelineAggregator.registerStreams(); MovAvgPipelineAggregator.registerStreams(); CumulativeSumPipelineAggregator.registerStreams(); BucketScriptPipelineAggregator.registerStreams(); diff --git a/core/src/main/java/org/elasticsearch/search/SearchParseException.java b/core/src/main/java/org/elasticsearch/search/SearchParseException.java index 49f8a9c7cf0..c0a9a370270 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchParseException.java +++ b/core/src/main/java/org/elasticsearch/search/SearchParseException.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.internal.SearchContext; @@ -86,7 +85,7 @@ public class SearchParseException extends SearchContextException { /** * Line number of the location of the error - * + * * @return the line number or -1 if unknown */ public int getLineNumber() { @@ -95,7 +94,7 @@ public class SearchParseException extends SearchContextException { /** * Column number of the location of the error - * + * * @return the column number or -1 if unknown */ public int getColumnNumber() { diff --git a/core/src/main/java/org/elasticsearch/search/SearchPhase.java b/core/src/main/java/org/elasticsearch/search/SearchPhase.java index bdab128fecb..85a75b9738d 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/SearchPhase.java @@ -19,7 +19,6 @@ package org.elasticsearch.search; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.search.internal.SearchContext; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index eb8414bb32e..02efa373ab0 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -38,6 +38,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; @@ -70,7 +72,6 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.IndicesWarmer.TerminationHandle; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; @@ -78,19 +79,34 @@ import org.elasticsearch.script.SearchScript; import 
org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.dfs.DfsSearchResult;
-import org.elasticsearch.search.fetch.*;
+import org.elasticsearch.search.fetch.FetchPhase;
+import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.search.fetch.QueryFetchSearchResult;
+import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
+import org.elasticsearch.search.fetch.ShardFetchRequest;
 import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
 import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext.FieldDataField;
 import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
 import org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField;
 import org.elasticsearch.search.highlight.HighlightBuilder;
-import org.elasticsearch.search.internal.*;
+import org.elasticsearch.search.internal.DefaultSearchContext;
+import org.elasticsearch.search.internal.InternalScrollSearchRequest;
+import org.elasticsearch.search.internal.ScrollContext;
+import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.SearchContext.Lifetime;
-import org.elasticsearch.search.query.*;
+import org.elasticsearch.search.internal.ShardSearchLocalRequest;
+import org.elasticsearch.search.internal.ShardSearchRequest;
+import org.elasticsearch.search.profile.Profilers;
+import org.elasticsearch.search.query.QueryPhase;
+import org.elasticsearch.search.query.QuerySearchRequest;
+import org.elasticsearch.search.query.QuerySearchResult;
+import org.elasticsearch.search.query.QuerySearchResultProvider;
+import org.elasticsearch.search.query.ScrollQuerySearchResult;
 import org.elasticsearch.search.warmer.IndexWarmersMetaData;
 import org.elasticsearch.threadpool.ThreadPool;

 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
@@ -111,9 +127,10 @@ public class SearchService extends AbstractLifecycleComponent imp
     public static final String NORMS_LOADING_KEY = "index.norms.loading";
     public static final String DEFAULT_KEEPALIVE_KEY = "search.default_keep_alive";
     public static final String KEEPALIVE_INTERVAL_KEY = "search.keep_alive_interval";
-    public static final String DEFAULT_SEARCH_TIMEOUT = "search.default_search_timeout";

     public static final TimeValue NO_TIMEOUT = timeValueMillis(-1);
+    public static final Setting<TimeValue> DEFAULT_SEARCH_TIMEOUT_SETTING = Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, Setting.Scope.CLUSTER);
+

     private final ThreadPool threadPool;

@@ -152,7 +169,7 @@ public class SearchService extends AbstractLifecycleComponent imp
     private final ParseFieldMatcher parseFieldMatcher;

     @Inject
-    public SearchService(Settings settings, NodeSettingsService nodeSettingsService, ClusterService clusterService, IndicesService indicesService,IndicesWarmer indicesWarmer, ThreadPool threadPool,
+    public SearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, ThreadPool threadPool,
                          ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase,
                          IndicesRequestCache indicesQueryCache) {
         super(settings);
@@ -186,19 +203,12 @@ public class SearchService extends AbstractLifecycleComponent imp
         this.indicesWarmer.addListener(new FieldDataWarmer(indicesWarmer));
         this.indicesWarmer.addListener(new SearchWarmer());

-        defaultSearchTimeout = settings.getAsTime(DEFAULT_SEARCH_TIMEOUT, NO_TIMEOUT);
-        nodeSettingsService.addListener(new SearchSettingsListener());
+        defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings);
+        clusterSettings.addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout);
     }

-    class SearchSettingsListener implements NodeSettingsService.Listener {
-        @Override
-        public void onRefreshSettings(Settings settings) {
-            final TimeValue maybeNewDefaultSearchTimeout = settings.getAsTime(SearchService.DEFAULT_SEARCH_TIMEOUT, SearchService.this.defaultSearchTimeout);
-            if (!maybeNewDefaultSearchTimeout.equals(SearchService.this.defaultSearchTimeout)) {
-                logger.info("updating [{}] from [{}] to [{}]", SearchService.DEFAULT_SEARCH_TIMEOUT, SearchService.this.defaultSearchTimeout, maybeNewDefaultSearchTimeout);
-                SearchService.this.defaultSearchTimeout = maybeNewDefaultSearchTimeout;
-            }
-        }
+    private void setDefaultSearchTimeout(TimeValue defaultSearchTimeout) {
+        this.defaultSearchTimeout = defaultSearchTimeout;
     }

     @Override
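Note (illustration, not part of the patch): the hunk above is the general migration pattern from NodeSettingsService.Listener to the typed Setting infrastructure: declare a Setting<T>, read the initial value from Settings, and register a method reference for dynamic updates. The consumer receives the already-validated new value, so the manual diffing done in the removed onRefreshSettings listener disappears. A minimal sketch under those assumptions; ExampleService and the example.timeout key are hypothetical, the Setting and ClusterSettings APIs are the ones used in this hunk:

import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

public class ExampleService {
    // dynamic (true), cluster-scoped setting with a typed default
    public static final Setting<TimeValue> EXAMPLE_TIMEOUT_SETTING =
            Setting.timeSetting("example.timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER);

    private volatile TimeValue timeout;

    public ExampleService(Settings settings, ClusterSettings clusterSettings) {
        this.timeout = EXAMPLE_TIMEOUT_SETTING.get(settings);
        // invoked with the validated new value whenever the cluster setting changes
        clusterSettings.addSettingsUpdateConsumer(EXAMPLE_TIMEOUT_SETTING, this::setTimeout);
    }

    private void setTimeout(TimeValue timeout) {
        this.timeout = timeout;
    }
}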
@@ -551,7 +561,7 @@ public class SearchService extends AbstractLifecycleComponent imp

         Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher;

-        SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher, defaultSearchTimeout);
+        DefaultSearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher, defaultSearchTimeout);
         SearchContext.setCurrent(context);

         try {
@@ -560,7 +570,7 @@ public class SearchService extends AbstractLifecycleComponent imp
                 context.scrollContext().scroll = request.scroll();
             }
             if (request.template() != null) {
-                ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context);
+                ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context, Collections.emptyMap());
                 BytesReference run = (BytesReference) executable.run();
                 try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) {
                     QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry());
@@ -658,7 +668,7 @@ public class SearchService extends AbstractLifecycleComponent imp
         }
     }

-    private void parseSource(SearchContext context, SearchSourceBuilder source) throws SearchContextException {
+    private void parseSource(DefaultSearchContext context, SearchSourceBuilder source) throws SearchContextException {
         // nothing to parse...
if (source == null) { return; @@ -714,6 +724,9 @@ public class SearchService extends AbstractLifecycleComponent imp if (source.minScore() != null) { context.minimumScore(source.minScore()); } + if (source.profile()) { + context.setProfilers(new Profilers(context.searcher())); + } context.timeoutInMillis(source.timeoutInMillis()); context.terminateAfter(source.terminateAfter()); if (source.aggregations() != null) { @@ -835,7 +848,7 @@ public class SearchService extends AbstractLifecycleComponent imp } if (source.scriptFields() != null) { for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) { - SearchScript searchScript = context.scriptService().search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH); + SearchScript searchScript = context.scriptService().search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH, Collections.emptyMap()); context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure())); } } @@ -958,7 +971,7 @@ public class SearchService extends AbstractLifecycleComponent imp final ObjectSet warmUp = new ObjectHashSet<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final String indexName = fieldMapper.fieldType().names().indexName(); + final String indexName = fieldMapper.fieldType().name(); Loading normsLoading = fieldMapper.fieldType().normsLoading(); if (normsLoading == null) { normsLoading = defaultLoading; @@ -1034,10 +1047,10 @@ public class SearchService extends AbstractLifecycleComponent imp fieldDataType = joinFieldType.fieldDataType(); // TODO: this can be removed in 3.0 when the old parent/child impl is removed: // related to: https://github.com/elastic/elasticsearch/pull/12418 - indexName = fieldMapper.fieldType().names().indexName(); + indexName = fieldMapper.fieldType().name(); } else { fieldDataType = fieldMapper.fieldType().fieldDataType(); - indexName = fieldMapper.fieldType().names().indexName(); + indexName = fieldMapper.fieldType().name(); } if (fieldDataType == null) { @@ -1066,10 +1079,10 @@ public class SearchService extends AbstractLifecycleComponent imp final long start = System.nanoTime(); indexFieldDataService.getForField(fieldType).load(ctx); if (indexShard.warmerService().logger().isTraceEnabled()) { - indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); + indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldType.name(), TimeValue.timeValueNanos(System.nanoTime() - start)); } } catch (Throwable t) { - indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldType.names().fullName()); + indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldType.name()); } finally { latch.countDown(); } @@ -1102,10 +1115,10 @@ public class SearchService extends AbstractLifecycleComponent imp fieldDataType = joinFieldType.fieldDataType(); // TODO: this can be removed in 3.0 when the old parent/child impl is removed: // related to: https://github.com/elastic/elasticsearch/pull/12418 - indexName = fieldMapper.fieldType().names().indexName(); + indexName = fieldMapper.fieldType().name(); } else { fieldDataType = fieldMapper.fieldType().fieldDataType(); - indexName = fieldMapper.fieldType().names().indexName(); + indexName = fieldMapper.fieldType().name(); } if (fieldDataType 
@@ -1102,10 +1115,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
                         fieldDataType = joinFieldType.fieldDataType();
                         // TODO: this can be removed in 3.0 when the old parent/child impl is removed:
                         // related to: https://github.com/elastic/elasticsearch/pull/12418
-                        indexName = fieldMapper.fieldType().names().indexName();
+                        indexName = fieldMapper.fieldType().name();
                     } else {
                         fieldDataType = fieldMapper.fieldType().fieldDataType();
-                        indexName = fieldMapper.fieldType().names().indexName();
+                        indexName = fieldMapper.fieldType().name();
                     }
                     if (fieldDataType == null) {
                         continue;
@@ -1131,10 +1144,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
                             IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
                             ifd.loadGlobal(searcher.getDirectoryReader());
                             if (indexShard.warmerService().logger().isTraceEnabled()) {
-                                indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
+                                indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.name(), TimeValue.timeValueNanos(System.nanoTime() - start));
                             }
                         } catch (Throwable t) {
-                            indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldType.names().fullName());
+                            indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldType.name());
                         } finally {
                             latch.countDown();
                         }
diff --git a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
index 1a12751d396..87a2114a788 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
@@ -23,7 +23,6 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.common.text.StringAndBytesText;
 import org.elasticsearch.common.text.Text;
 
 import java.io.IOException;
@@ -42,8 +41,8 @@ public class SearchShardTarget implements Streamable, Comparable deferredCollectors) {
         this.collector = BucketCollector.wrap(deferredCollectors);
     }
-    
+
     public final void replay(long... selectedBuckets) throws IOException {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java
index 2c597bdcee9..438e872be77 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java
@@ -88,7 +88,7 @@ public class ChildrenParser implements Aggregator.Parser {
                 parentFilter = parentDocMapper.typeFilter();
                 childFilter = childDocMapper.typeFilter();
                 ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper.fieldType());
-                config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper.fieldType()));
+                config.fieldContext(new FieldContext(parentFieldMapper.fieldType().name(), parentChildIndexFieldData, parentFieldMapper.fieldType()));
             } else {
                 config.unmapped(true);
             }
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java
index 6d9a1edc712..63819b978d2 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java
@@ -18,9 +18,14 @@
  */
 package org.elasticsearch.search.aggregations.bucket.children;
 
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.Lucene;
@@ -40,10 +45,8 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 // The RecordingPerReaderBucketCollector assumes per segment recording which isn't the case for this
 // aggregation, for this reason that collector can't be used
@@ -64,9 +67,6 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
     private final LongObjectPagedHashMap parentOrdToOtherBuckets;
     private boolean multipleBucketsPerParentOrd = false;
 
-    // This needs to be a Set to avoid duplicate reader context entries via (#setNextReader(...), it can get invoked multiple times with the same reader context)
-    private Set<LeafReaderContext> replay = new LinkedHashSet<>();
-
     public ParentToChildrenAggregator(String name, AggregatorFactories factories, AggregationContext aggregationContext,
             Aggregator parent, String parentType, Query childFilter, Query parentFilter,
             ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource,
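The ChildrenParser hunk above shows the field-name API change that recurs throughout this diff: the nested names().indexName() accessor collapses into a single name() call on MappedFieldType. A before/after sketch, where fieldType stands in for any MappedFieldType instance (illustrative only):

    // Before this change (old accessor chain):
    //   String indexName = fieldType.names().indexName();
    // After: the field name is exposed directly.
    String indexName = fieldType.name();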
@@ -99,17 +99,11 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
         if (valuesSource == null) {
             return LeafBucketCollector.NO_OP_COLLECTOR;
         }
-        if (replay == null) {
-            throw new IllegalStateException();
-        }
         final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);
         assert globalOrdinals != null;
         Scorer parentScorer = parentFilter.scorer(ctx);
         final Bits parentDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), parentScorer);
-        if (childFilter.scorer(ctx) != null) {
-            replay.add(ctx);
-        }
         return new LeafBucketCollector() {
 
             @Override
@@ -138,14 +132,13 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
 
     @Override
     protected void doPostCollection() throws IOException {
-        final Set replay = this.replay;
-        this.replay = null;
-
-        for (LeafReaderContext ctx : replay) {
-            DocIdSetIterator childDocsIter = childFilter.scorer(ctx);
-            if (childDocsIter == null) {
+        IndexReader indexReader = context().searchContext().searcher().getIndexReader();
+        for (LeafReaderContext ctx : indexReader.leaves()) {
+            Scorer childDocsScorer = childFilter.scorer(ctx);
+            if (childDocsScorer == null) {
                 continue;
             }
+            DocIdSetIterator childDocsIter = childDocsScorer.iterator();
 
             final LeafBucketCollector sub = collectableSubAggregators.getLeafCollector(ctx);
             final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java
index 192d624b5e0..48702dab230 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.search.aggregations.bucket.filter;
 
 import org.apache.lucene.search.MatchAllDocsQuery;
-import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.search.aggregations.Aggregator;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java
index 3cd67f835ec..eec7064d1bd 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java
@@ -112,16 +112,16 @@ public class FiltersAggregator extends BucketsAggregator {
 
     @Override
     public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
-        List<InternalFilters.Bucket> buckets = new ArrayList<>(filters.length);
+        List<InternalFilters.InternalBucket> buckets = new ArrayList<>(filters.length);
         for (int i = 0; i < keys.length; i++) {
             long bucketOrd = bucketOrd(owningBucketOrdinal, i);
-            InternalFilters.Bucket bucket = new InternalFilters.Bucket(keys[i], bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed);
+            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i], bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed);
             buckets.add(bucket);
         }
         // other bucket
         if (showOtherBucket) {
             long bucketOrd = bucketOrd(owningBucketOrdinal, keys.length);
-            InternalFilters.Bucket bucket = new InternalFilters.Bucket(otherBucketKey, bucketDocCount(bucketOrd),
+            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, bucketDocCount(bucketOrd),
                     bucketAggregations(bucketOrd), keyed);
             buckets.add(bucket);
         }
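With the recorded replay set gone, ParentToChildrenAggregator derives the segments to replay from the top-level reader in doPostCollection, and pulls the iterator off the Scorer explicitly, since Lucene no longer treats a Scorer as a DocIdSetIterator. Condensed from the hunk above:

    // Walk every leaf; segments without child docs yield a null scorer.
    for (LeafReaderContext ctx : indexReader.leaves()) {
        Scorer childDocsScorer = childFilter.scorer(ctx);
        if (childDocsScorer == null) {
            continue;
        }
        // The iterator is now obtained explicitly from the scorer.
        DocIdSetIterator childDocsIter = childDocsScorer.iterator();
        // ... replay matching child docs into the sub-aggregators ...
    }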
@@ -131,9 +131,9 @@ public class FiltersAggregator extends BucketsAggregator {
 
     @Override
     public InternalAggregation buildEmptyAggregation() {
         InternalAggregations subAggs = buildEmptySubAggregations();
-        List<InternalFilters.Bucket> buckets = new ArrayList<>(filters.length);
+        List<InternalFilters.InternalBucket> buckets = new ArrayList<>(filters.length);
         for (int i = 0; i < keys.length; i++) {
-            InternalFilters.Bucket bucket = new InternalFilters.Bucket(keys[i], 0, subAggs, keyed);
+            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i], 0, subAggs, keyed);
             buckets.add(bucket);
         }
         return new InternalFilters(name, buckets, keyed, pipelineAggregators(), metaData());
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java
index e415a025a4c..aed398d51df 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java
@@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
-import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation.InternalBucket;
 import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
 import org.elasticsearch.search.aggregations.bucket.BucketStreams;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -41,7 +40,7 @@ import java.util.Map;
 /**
  *
  */
-public class InternalFilters extends InternalMultiBucketAggregation<InternalFilters, InternalFilters.Bucket> implements Filters {
+public class InternalFilters extends InternalMultiBucketAggregation<InternalFilters, InternalFilters.InternalBucket> implements Filters {
 
     public final static Type TYPE = new Type("filters");
 
@@ -54,16 +53,16 @@ public class InternalFilters extends InternalMultiBucketAggregation
-    private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
+    private final static BucketStreams.Stream<InternalBucket> BUCKET_STREAM = new BucketStreams.Stream<InternalBucket>() {
         @Override
-        public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
-            Bucket filters = new Bucket(context.keyed());
+        public InternalBucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
+            InternalBucket filters = new InternalBucket(context.keyed());
             filters.readFrom(in);
             return filters;
         }
 
         @Override
-        public BucketStreamContext getBucketStreamContext(Bucket bucket) {
+        public BucketStreamContext getBucketStreamContext(InternalBucket bucket) {
             BucketStreamContext context = new BucketStreamContext();
             context.keyed(bucket.keyed);
             return context;
@@ -75,19 +74,19 @@ public class InternalFilters extends InternalMultiBucketAggregation
-        Bucket reduce(List<Bucket> buckets, ReduceContext context) {
-            Bucket reduced = null;
+        InternalBucket reduce(List<InternalBucket> buckets, ReduceContext context) {
+            InternalBucket reduced = null;
             List<InternalAggregations> aggregationsList = new ArrayList<>(buckets.size());
-            for (Bucket bucket : buckets) {
+            for (InternalBucket bucket : buckets) {
                 if (reduced == null) {
-                    reduced = new Bucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed);
+                    reduced = new InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed);
                 } else {
                     reduced.docCount += bucket.docCount;
                 }
@@ -157,13 +156,13 @@ public class InternalFilters extends InternalMultiBucketAggregation
 
-    private List<Bucket> buckets;
-    private Map<String, Bucket> bucketMap;
+    private List<InternalBucket> buckets;
+    private Map<String, InternalBucket> bucketMap;
     private boolean keyed;
 
     public InternalFilters() {} // for serialization
 
-    public InternalFilters(String name, List<Bucket> buckets, boolean keyed, List pipelineAggregators, Map metaData) {
+    public InternalFilters(String name, List<InternalBucket> buckets, boolean keyed, List pipelineAggregators, Map metaData) {
         super(name, pipelineAggregators, metaData);
         this.buckets = buckets;
         this.keyed = keyed;
@@ -175,25 +174,25 @@ public class InternalFilters extends InternalMultiBucketAggregation
-    public InternalFilters create(List<Bucket> buckets) {
+    public InternalFilters create(List<InternalBucket> buckets) {
         return new InternalFilters(this.name, buckets, this.keyed, this.pipelineAggregators(), this.metaData);
     }
 
     @Override
-    public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
-        return new Bucket(prototype.key, prototype.docCount, aggregations, prototype.keyed);
+    public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) {
+        return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed);
     }
 
     @Override
-    public List<Bucket> getBuckets() {
+    public List<InternalBucket> getBuckets() {
         return buckets;
     }
 
     @Override
-    public Bucket getBucketByKey(String key) {
+    public InternalBucket getBucketByKey(String key) {
         if (bucketMap == null) {
             bucketMap = new HashMap<>(buckets.size());
-            for (Bucket bucket : buckets) {
+            for (InternalBucket bucket : buckets) {
                 bucketMap.put(bucket.getKey(), bucket);
             }
         }
@@ -202,26 +201,26 @@ public class InternalFilters extends InternalMultiBucketAggregation
     public InternalAggregation doReduce(List aggregations, ReduceContext reduceContext) {
-        List<List<Bucket>> bucketsList = null;
+        List<List<InternalBucket>> bucketsList = null;
         for (InternalAggregation aggregation : aggregations) {
             InternalFilters filters = (InternalFilters) aggregation;
             if (bucketsList == null) {
                 bucketsList = new ArrayList<>(filters.buckets.size());
-                for (Bucket bucket : filters.buckets) {
-                    List<Bucket> sameRangeList = new ArrayList<>(aggregations.size());
+                for (InternalBucket bucket : filters.buckets) {
+                    List<InternalBucket> sameRangeList = new ArrayList<>(aggregations.size());
                     sameRangeList.add(bucket);
                     bucketsList.add(sameRangeList);
                 }
             } else {
                 int i = 0;
-                for (Bucket bucket : filters.buckets) {
+                for (InternalBucket bucket : filters.buckets) {
                     bucketsList.get(i++).add(bucket);
                 }
             }
         }
 
-        InternalFilters reduced = new InternalFilters(name, new ArrayList(bucketsList.size()), keyed, pipelineAggregators(), getMetaData());
-        for (List<Bucket> sameRangeList : bucketsList) {
+        InternalFilters reduced = new InternalFilters(name, new ArrayList(bucketsList.size()), keyed, pipelineAggregators(), getMetaData());
+        for (List<InternalBucket> sameRangeList : bucketsList) {
             reduced.buckets.add((sameRangeList.get(0)).reduce(sameRangeList, reduceContext));
         }
         return reduced;
@@ -231,9 +230,9 @@ public class InternalFilters extends InternalMultiBucketAggregation
-        List<Bucket> buckets = new ArrayList<>(size);
+        List<InternalBucket> buckets = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
-            Bucket bucket = new Bucket(keyed);
+            InternalBucket bucket = new InternalBucket(keyed);
             bucket.readFrom(in);
             buckets.add(bucket);
         }
@@ -245,7 +244,7 @@ public class InternalFilters extends InternalMultiBucketAggregation extends Inter
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         if (formatter != ValueFormatter.RAW) {
-            Text keyTxt = new StringText(formatter.format(key));
+            Text keyTxt = new Text(formatter.format(key));
             if (keyed) {
                 builder.startObject(keyTxt.string());
             } else {
@@ -392,12 +391,14 @@ public class InternalHistogram extends Inter
         return reducedBuckets;
     }
 
-    private void addEmptyBuckets(List<B> list) {
+    private void addEmptyBuckets(List<B> list, ReduceContext reduceContext) {
         B lastBucket = null;
         ExtendedBounds bounds = emptyBucketInfo.bounds;
         ListIterator<B> iter = list.listIterator();
 
         // first adding all the empty buckets *before* the actual data (based on th extended_bounds.min the user requested)
+        InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(Collections.singletonList(emptyBucketInfo.subAggregations),
+                reduceContext);
         if (bounds != null) {
             B firstBucket = iter.hasNext() ? list.get(iter.nextIndex()) : null;
             if (firstBucket == null) {
@@ -405,7 +406,9 @@ public class InternalHistogram extends Inter
                     long key = bounds.min;
                     long max = bounds.max;
                     while (key <= max) {
-                        iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter));
+                        iter.add(getFactory().createBucket(key, 0,
+                                reducedEmptySubAggs,
+                                keyed, formatter));
                         key = emptyBucketInfo.rounding.nextRoundingValue(key);
                     }
                 }
@@ -414,7 +417,9 @@ public class InternalHistogram extends Inter
                     long key = bounds.min;
                     if (key < firstBucket.key) {
                         while (key < firstBucket.key) {
-                            iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter));
+                            iter.add(getFactory().createBucket(key, 0,
+                                    reducedEmptySubAggs,
+                                    keyed, formatter));
                             key = emptyBucketInfo.rounding.nextRoundingValue(key);
                         }
                     }
@@ -429,7 +434,9 @@ public class InternalHistogram extends Inter
                 if (lastBucket != null) {
                     long key = emptyBucketInfo.rounding.nextRoundingValue(lastBucket.key);
                     while (key < nextBucket.key) {
-                        iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter));
+                        iter.add(getFactory().createBucket(key, 0,
+                                reducedEmptySubAggs, keyed,
+                                formatter));
                         key = emptyBucketInfo.rounding.nextRoundingValue(key);
                     }
                     assert key == nextBucket.key;
@@ -442,7 +449,9 @@ public class InternalHistogram extends Inter
             long key = emptyBucketInfo.rounding.nextRoundingValue(lastBucket.key);
             long max = bounds.max;
             while (key <= max) {
-                iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter));
+                iter.add(getFactory().createBucket(key, 0,
+                        reducedEmptySubAggs, keyed,
+                        formatter));
                 key = emptyBucketInfo.rounding.nextRoundingValue(key);
             }
         }
@@ -454,7 +463,7 @@ public class InternalHistogram extends Inter
 
         // adding empty buckets if needed
         if (minDocCount == 0) {
-            addEmptyBuckets(reducedBuckets);
+            addEmptyBuckets(reducedBuckets, reduceContext);
         }
         if (order == InternalOrder.KEY_ASC) {
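The InternalHistogram hunks above change how gap buckets get their sub-aggregations: the shared empty prototype is reduced once up front and the reduced instance is reused for every synthetic bucket, instead of handing the unreduced prototype to each one. The core of the pattern, with names taken from the hunks above:

    // Reduce the shared empty sub-aggregations a single time...
    InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(
            Collections.singletonList(emptyBucketInfo.subAggregations), reduceContext);
    // ...then reuse that one reduced instance for every gap bucket.
    iter.add(getFactory().createBucket(key, 0, reducedEmptySubAggs, keyed, formatter));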
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java
index 8c227694bf3..fa23cf8dd93 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java
@@ -24,6 +24,7 @@ import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitSet;
@@ -69,7 +70,12 @@ public class NestedAggregator extends SingleBucketAggregator {
         final IndexSearcher searcher = new IndexSearcher(topLevelContext);
         searcher.setQueryCache(null);
         final Weight weight = searcher.createNormalizedWeight(childFilter, false);
-        childDocs = weight.scorer(ctx);
+        Scorer childDocsScorer = weight.scorer(ctx);
+        if (childDocsScorer == null) {
+            childDocs = null;
+        } else {
+            childDocs = childDocsScorer.iterator();
+        }
 
         return new LeafBucketCollectorBase(sub, null) {
             @Override
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java
index 4010858f259..1b9363caa57 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.search.aggregations.bucket.nested;
 
 import com.carrotsearch.hppc.LongIntHashMap;
-
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Query;
@@ -79,8 +78,8 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
             // fast forward to retrieve the parentDoc this childDoc belongs to
             final int parentDoc = parentDocs.nextSetBit(childDoc);
             assert childDoc <= parentDoc && parentDoc != DocIdSetIterator.NO_MORE_DOCS;
-
-            int keySlot = bucketOrdToLastCollectedParentDoc.indexOf(bucket); 
+
+            int keySlot = bucketOrdToLastCollectedParentDoc.indexOf(bucket);
             if (bucketOrdToLastCollectedParentDoc.indexExists(keySlot)) {
                 int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.indexGet(keySlot);
                 if (parentDoc > lastCollectedParentDoc) {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java
index a50c1c109f3..e20d1ac931f 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java
@@ -32,8 +32,6 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.index.mapper.ip.IpFieldMapper.MAX_IP;
-
 /**
  *
  */
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java
index 8b0862fed29..dc1e2a65d37 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java
@@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.range.ipv4;
 
 import org.elasticsearch.common.network.Cidrs;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.ip.IpFieldMapper;
 import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
index 00b10ee63c3..8cb980954cb 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
@@ -22,7 +22,13 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.search.aggregations.*;
+import org.elasticsearch.search.aggregations.AggregationExecutionException;
+import org.elasticsearch.search.aggregations.Aggregator;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.AggregatorFactory;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.LeafBucketCollector;
+import org.elasticsearch.search.aggregations.NonCollectingAggregator;
 import org.elasticsearch.search.aggregations.bucket.BestDocsDeferringCollector;
 import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector;
 import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
@@ -39,7 +45,7 @@ import java.util.Map;
 
 /**
  * Aggregate on only the top-scoring docs on a shard.
- * 
+ *
  * TODO currently the diversity feature of this agg offers only 'script' and
  * 'field' as a means of generating a de-dup value. In future it would be nice
  * if users could use any of the "bucket" aggs syntax (geo, date histogram...)
@@ -131,8 +137,8 @@ public class SamplerAggregator extends SingleBucketAggregator {
         public String toString() {
             return parseField.getPreferredName();
         }
-    }
-    
+    }
+
     protected final int shardSize;
     protected BestDocsDeferringCollector bdd;
@@ -213,7 +219,7 @@ public class SamplerAggregator extends SingleBucketAggregator {
             return new DiversifiedNumericSamplerAggregator(name, shardSize, factories, context, parent, pipelineAggregators, metaData,
                     (Numeric) valuesSource, maxDocsPerValue);
         }
-        
+
         if (valuesSource instanceof ValuesSource.Bytes) {
             ExecutionMode execution = null;
             if (executionHint != null) {
@@ -231,7 +237,7 @@ public class SamplerAggregator extends SingleBucketAggregator {
             return execution.create(name, factories, shardSize, maxDocsPerValue, valuesSource, context, parent, pipelineAggregators,
                     metaData);
         }
-        
+
         throw new AggregationExecutionException("Sampler aggregation cannot be applied to field [" + config.fieldContext().field() + "]. It can only be applied to numeric or string fields.");
     }
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java
index 536c3945e8c..399e85728af 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java
@@ -30,7 +30,11 @@ import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lucene.index.FilterableTermsEnum;
 import org.elasticsearch.common.lucene.index.FreqTermsEnum;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.search.aggregations.*;
+import org.elasticsearch.search.aggregations.AggregationExecutionException;
+import org.elasticsearch.search.aggregations.Aggregator;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.NonCollectingAggregator;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
 import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
@@ -198,7 +202,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
                     pipelineAggregators, metaData);
         }
-        
+
         if ((includeExclude != null) && (includeExclude.isRegexBased())) {
             throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude " +
                     "settings as they can only be applied to string fields. Use an array of numeric values for include/exclude clauses used to filter numeric fields");
@@ -223,12 +227,12 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
 
     /**
      * Creates the TermsEnum (if not already created) and must be called before any calls to getBackgroundFrequency
-     * @param context The aggregation context 
+     * @param context The aggregation context
      * @return The number of documents in the index (after an optional filter might have been applied)
      */
     public long prepareBackground(AggregationContext context) {
         if (termsEnum != null) {
-            // already prepared - return 
+            // already prepared - return
             return termsEnum.getNumDocs();
         }
         SearchContext searchContext = context.searchContext();
@@ -238,7 +242,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
             // Setup a termsEnum for sole use by one aggregator
             termsEnum = new FilterableTermsEnum(reader, indexedFieldName, PostingsEnum.NONE, filter);
         } else {
-            // When we have > 1 agg we have possibility of duplicate term frequency lookups 
+            // When we have > 1 agg we have possibility of duplicate term frequency lookups
             // and so use a TermsEnum that caches results of all term lookups
             termsEnum = new FreqTermsEnum(reader, indexedFieldName, true, false, filter, searchContext.bigArrays());
         }
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java
index 046ca717b9f..9efea000512 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java
@@ -30,13 +30,18 @@ import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryShardException;
-import org.elasticsearch.script.*;
+import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.Script;
 import org.elasticsearch.script.Script.ScriptField;
+import org.elasticsearch.script.ScriptContext;
+import org.elasticsearch.script.ScriptParameterParser;
 import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
+import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -82,7 +87,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
 
     @Override
     public void initialize(InternalAggregation.ReduceContext context) {
-        searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context);
+        searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap());
         searchScript.setNextVar("_subset_freq", subsetDfHolder);
         searchScript.setNextVar("_subset_size", subsetSizeHolder);
         searchScript.setNextVar("_superset_freq", supersetDfHolder);
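The ScriptHeuristic hunk above follows the same pattern as every other script call site in this diff: the ScriptService compile/executable/search entry points gain a trailing params map, and callers with nothing to pass supply Collections.emptyMap(). A sketch of the updated call shape, with scriptService, script and context as in the hunks:

    // New signature: an explicit params map is now required at lookup time.
    ExecutableScript searchScript = scriptService.executable(
            script, ScriptContext.Standard.AGGS, context, Collections.emptyMap());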
@@ -170,7 +175,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
         }
         ExecutableScript searchScript;
         try {
-            searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, context);
+            searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap());
         } catch (Exception e) {
             throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. the script [{}] could not be loaded", e, script, heuristicName);
         }
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParser.java
index ff236460e72..92baa43e6b3 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParser.java
@@ -21,8 +21,8 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java
index 64d2ae659e0..198f129c28a 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java
@@ -18,8 +18,8 @@
  */
 package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 
-import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.io.stream.StreamInput;
+
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
index 6598f6dbdbb..1e7a0047ea5 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
@@ -267,9 +267,9 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
 
         private final LongHash bucketOrds;
 
-        public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource,
+        public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
                 Terms.Order order, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext,
-                Aggregator parent, SubAggCollectionMode collectionMode, 
+                Aggregator parent, SubAggCollectionMode collectionMode,
                 boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
             super(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, collectionMode,
@@ -341,7 +341,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
 
         private RandomAccessOrds segmentOrds;
 
         public LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
-                Terms.Order order, 
+                Terms.Order order,
                 BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode,
                 boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
@@ -411,11 +411,10 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
             // This is the cleanest way I can think of so far
             GlobalOrdinalMapping mapping;
-            if (globalOrds instanceof GlobalOrdinalMapping) {
-                mapping = (GlobalOrdinalMapping) globalOrds;
-            } else {
-                assert globalOrds.getValueCount() == segmentOrds.getValueCount();
+            if (globalOrds.getValueCount() == segmentOrds.getValueCount()) {
                 mapping = null;
+            } else {
+                mapping = (GlobalOrdinalMapping) globalOrds;
             }
             for (long i = 1; i < segmentDocCounts.size(); i++) {
                 // We use set(...) here, because we need to reset the slow to 0.
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
index 26c2eee2f6b..c270517cd9d 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
@@ -91,7 +91,7 @@ public class LongTerms extends InternalTerms {
 
         @Override
         public String getKeyAsString() {
-            return String.valueOf(term);
+            return formatter.format(term);
         }
 
         @Override
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java
index 4f9faae962c..776e49746e1 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java
@@ -26,9 +26,9 @@ import org.elasticsearch.common.util.BytesRefHash;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
-import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
+import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
 import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
 import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -154,7 +154,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
         }
         // replay any deferred collections
         runDeferredCollections(survivingBucketOrds);
-        
+
         // Now build the aggs
         for (int i = 0; i < list.length; i++) {
             final StringTerms.Bucket bucket = (StringTerms.Bucket)list[i];
@@ -162,7 +162,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
             bucket.aggregations = bucketAggregations(bucket.bucketOrd);
             bucket.docCountError = 0;
         }
-        
+
         return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
                 bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, pipelineAggregators(),
                 metaData());
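Two behavioral fixes sit in the hunks above. The GlobalOrdinalMapping branch is now decided by comparing value counts instead of an instanceof check, and LongTerms.getKeyAsString renders keys through the bucket's value formatter rather than String.valueOf, so numeric terms with a format (dates, for example) print consistently:

    @Override
    public String getKeyAsString() {
        return formatter.format(term); // was: String.valueOf(term)
    }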
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
index f6948649785..270dc009af2 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
@@ -21,8 +21,12 @@ package org.elasticsearch.search.aggregations.bucket.terms;
 import org.apache.lucene.search.IndexSearcher;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.search.aggregations.*;
+import org.elasticsearch.search.aggregations.AggregationExecutionException;
+import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.NonCollectingAggregator;
 import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -90,7 +94,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFac
                     pipelineAggregators, Map metaData) throws IOException {
-                if (includeExclude != null || factories.count() > 0) {
+                if (includeExclude != null || factories.count() > 0
+                        // we need the FieldData impl to be able to extract the
+                        // segment to global ord mapping
+                        || valuesSource.getClass() != ValuesSource.Bytes.FieldData.class) {
                     return GLOBAL_ORDINALS.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude,
                             aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
                 }
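The TermsAggregatorFactory hunk above tightens when the low-cardinality execution mode may be chosen: the optimization needs the FieldData implementation's segment-to-global ordinal mapping, so any other values-source class now falls through to plain GLOBAL_ORDINALS. The decisive condition, condensed from the hunk:

    // Anything other than the exact ValuesSource.Bytes.FieldData
    // implementation forces the GLOBAL_ORDINALS execution mode.
    boolean forceGlobalOrdinals = includeExclude != null
            || factories.count() > 0
            || valuesSource.getClass() != ValuesSource.Bytes.FieldData.class;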
- * + * */ @Override public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException { @@ -153,7 +152,7 @@ public class IncludeExclude { } } - + static class TermListBackedOrdinalsFilter extends OrdinalsFilter { private final SortedSet includeValues; @@ -173,7 +172,7 @@ public class IncludeExclude { if (ord >= 0) { acceptedGlobalOrdinals.set(ord); } - } + } } else { // default to all terms being acceptable acceptedGlobalOrdinals.set(0, acceptedGlobalOrdinals.length()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java index 3834a65a3ad..b99db25a655 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.util.GeoHashUtils; import org.apache.lucene.util.GeoUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.lease.Releasables; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java index e264cf694dc..0b300403c9d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.metrics.percentiles; import com.carrotsearch.hppc.DoubleArrayList; - import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java index d39a0335ac3..00c6b6b49bb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -91,7 +92,7 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement vars.putAll(firstAggregation.reduceScript.getParams()); } CompiledScript compiledScript = reduceContext.scriptService().compile(firstAggregation.reduceScript, - ScriptContext.Standard.AGGS, reduceContext); + ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); ExecutableScript script = reduceContext.scriptService().executable(compiledScript, vars); aggregation = script.run(); } else { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index 2c1caaa5241..6603c6289b2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -39,6 +39,7 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -58,11 +59,11 @@ public class ScriptedMetricAggregator extends MetricsAggregator { this.params = params; ScriptService scriptService = context.searchContext().scriptService(); if (initScript != null) { - scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext()).run(); + scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap()).run(); } - this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS); + this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS, Collections.emptyMap()); if (combineScript != null) { - this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext()); + this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap()); } else { this.combineScript = null; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricBuilder.java index 4bbb407f788..803123ffc59 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricBuilder.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.scripted; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.search.aggregations.metrics.MetricsAggregationBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java index d80b087930c..6fbc6f8c6d6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java @@ -22,12 +22,12 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketBuilder; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketBuilder; -import 
org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptBuilder; +import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder; import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeBuilder; import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgBuilder; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java index 1e5dd46eca6..88a1f42b174 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java @@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketParser; import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeParser; @@ -64,4 +64,4 @@ public abstract class BucketMetricsBuilder> ex protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java index 287fb7b3402..3cf084b239a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java index 683db6c7d68..4cd584a0b03 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java @@ -19,8 +19,8 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; 
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java index 92e8b01f43b..24e820481a3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java @@ -31,7 +31,10 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetric import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java index 789f8c961a3..76cb15ed46d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java @@ -41,7 +41,11 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -90,7 +94,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { InternalMultiBucketAggregation originalAgg = (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext); + CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); List newBuckets = new ArrayList<>(); for (Bucket bucket : buckets) { Map vars = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java index ab077634222..50b4578346d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java @@ -21,8 +21,8 @@ package org.elasticsearch.search.aggregations.pipeline.derivative; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java index 3da0d93e8e0..855fea80f2b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java @@ -124,7 +124,7 @@ public class DerivativePipelineAggregator extends PipelineAggregator { } else if (key instanceof Number) { return ((Number) key).longValue(); } else { - throw new AggregationExecutionException("Bucket keys must be either a Number or a DateTime for aggregation " + name() + throw new AggregationExecutionException("InternalBucket keys must be either a Number or a DateTime for aggregation " + name() + ". Found bucket with key " + key); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java index 669a223b215..edc3b4e87ce 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java @@ -38,6 +38,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -88,7 +89,7 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator { InternalMultiBucketAggregation originalAgg = (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext); + CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); List newBuckets = new ArrayList<>(); for (Bucket bucket : buckets) { Map vars = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java index 48686b9a6b9..4f7034b633f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.movavg; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationExecutionException; import 
org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java index 55611a0d000..fe0321bf0fc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java @@ -29,7 +29,8 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; import java.io.IOException; import java.text.ParseException; -import java.util.*; +import java.util.Collection; +import java.util.Map; /** * Calculate a doubly exponential weighted moving average diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java index 176d4b06f3b..55cf6be073c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java @@ -32,7 +32,11 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; import java.io.IOException; import java.text.ParseException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; /** * Calculate a triple exponential weighted moving average diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java index f1755132072..4bfac9d44cb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.SearchParseException; import java.io.IOException; import java.text.ParseException; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java index 2a8f27d127c..a88730e582f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; /** diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java index 0464dc8c1d8..b03bc8d6833 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java @@ -18,7 +18,13 @@ */ package org.elasticsearch.search.aggregations.support; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.Bits; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java index 506c9d16d7c..fced5fdc913 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java @@ -43,6 +43,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.joda.time.DateTimeZone; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -190,7 +191,7 @@ public class ValuesSourceParser { return config; } - MappedFieldType fieldType = context.smartNameFieldTypeFromAnyType(input.field); + MappedFieldType fieldType = context.smartNameFieldType(input.field); if (fieldType == null) { Class valuesSourceType = valueType != null ? (Class) valueType.getValuesSourceType() : this.valuesSourceType; ValuesSourceConfig config = new ValuesSourceConfig<>(valuesSourceType); @@ -227,7 +228,7 @@ public class ValuesSourceParser { } private SearchScript createScript() { - return input.script == null ? null : context.scriptService().search(context.lookup(), input.script, ScriptContext.Standard.AGGS); + return input.script == null ? 
null : context.scriptService().search(context.lookup(), input.script, ScriptContext.Standard.AGGS, Collections.emptyMap()); } private static ValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType) { diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 465729ca714..fe7e6064fc3 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.builder; import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; - +import org.elasticsearch.Version; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -91,6 +91,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ public static final ParseField RESCORE_FIELD = new ParseField("rescore"); public static final ParseField STATS_FIELD = new ParseField("stats"); public static final ParseField EXT_FIELD = new ParseField("ext"); + public static final ParseField PROFILE_FIELD = new ParseField("profile"); private static final SearchSourceBuilder PROTOTYPE = new SearchSourceBuilder(); @@ -158,6 +159,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ private BytesReference ext = null; + private boolean profile = false; + + /** * Constructs a new search source builder. */ @@ -475,6 +479,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return this; } + /** + * Should the query be profiled. Defaults to {@code false}. + */ + public SearchSourceBuilder profile(boolean profile) { + this.profile = profile; + return this; + } + + /** + * Return whether to profile query execution. Defaults to {@code false}. + */ + public boolean profile() { + return profile; + } + /** * Gets the bytes representing the rescore builders for this request. 
*/ @@ -723,6 +743,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.fieldNames = fieldNames; } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) { builder.sort(parser.text()); + } else if (context.parseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) { + builder.profile = parser.booleanValue(); } else { throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", parser.getTokenLocation()); @@ -931,6 +953,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.field(EXPLAIN_FIELD.getPreferredName(), explain); } + if (profile) { + builder.field("profile", true); + } + if (fetchSourceContext != null) { builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext); } @@ -1212,6 +1238,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ if (in.readBoolean()) { builder.ext = in.readBytesReference(); } + if (in.getVersion().onOrAfter(Version.V_2_2_0)) { + builder.profile = in.readBoolean(); + } else { + builder.profile = false; + } return builder; } @@ -1325,13 +1356,16 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ if (hasExt) { out.writeBytesReference(ext); } + if (out.getVersion().onOrAfter(Version.V_2_2_0)) { + out.writeBoolean(profile); + } } @Override public int hashCode() { return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from, highlightBuilder, indexBoost, innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, - size, sorts, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version); + size, sorts, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile); } @Override @@ -1364,6 +1398,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ && Objects.equals(terminateAfter, other.terminateAfter) && Objects.equals(timeoutInMillis, other.timeoutInMillis) && Objects.equals(trackScores, other.trackScores) - && Objects.equals(version, other.version); + && Objects.equals(version, other.version) + && Objects.equals(profile, other.profile); } } diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index f76527163cb..ef16a03831d 100644 --- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -43,7 +43,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.DfsSearchResult; @@ -52,6 +51,8 @@ import org.elasticsearch.search.fetch.FetchSearchResultProvider; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.profile.InternalProfileShardResults; +import 
org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.suggest.Suggest; @@ -410,6 +411,17 @@ public class SearchPhaseController extends AbstractComponent { } } + //Collect profile results + InternalProfileShardResults shardResults = null; + if (!queryResults.isEmpty() && firstResult.profileResults() != null) { + Map> profileResults = new HashMap<>(queryResults.size()); + for (AtomicArray.Entry entry : queryResults) { + String key = entry.value.queryResult().shardTarget().toString(); + profileResults.put(key, entry.value.queryResult().profileResults()); + } + shardResults = new InternalProfileShardResults(profileResults); + } + if (aggregations != null) { List pipelineAggregators = firstResult.pipelineAggregators(); if (pipelineAggregators != null) { @@ -427,7 +439,7 @@ public class SearchPhaseController extends AbstractComponent { InternalSearchHits searchHits = new InternalSearchHits(hits.toArray(new InternalSearchHit[hits.size()]), totalHits, maxScore); - return new InternalSearchResponse(searchHits, aggregations, suggest, timedOut, terminatedEarly); + return new InternalSearchResponse(searchHits, aggregations, suggest, shardResults, timedOut, terminatedEarly); } } diff --git a/core/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java b/core/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java index d8330e5ec15..d762540caab 100644 --- a/core/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java +++ b/core/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.dfs; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; @@ -72,8 +71,8 @@ public class AggregatedDfs implements Streamable { termStatistics = HppcMaps.newMap(size); for (int i = 0; i < size; i++) { Term term = new Term(in.readString(), in.readBytesRef()); - TermStatistics stats = new TermStatistics(in.readBytesRef(), - in.readVLong(), + TermStatistics stats = new TermStatistics(in.readBytesRef(), + in.readVLong(), DfsSearchResult.subOne(in.readVLong())); termStatistics.put(term, stats); } @@ -84,7 +83,7 @@ public class AggregatedDfs implements Streamable { @Override public void writeTo(final StreamOutput out) throws IOException { out.writeVInt(termStatistics.size()); - + for (ObjectObjectCursor c : termStatistics()) { Term term = (Term) c.key; out.writeString(term.field()); diff --git a/core/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/core/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index e1b98c413e1..fab8323d04d 100644 --- a/core/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/core/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.dfs; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; diff --git a/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index c86af528bfc..6e93e410587 100644 --- 
a/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.dfs; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; @@ -127,12 +126,12 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes } this.termStatistics = readTermStats(in, terms); readFieldStats(in, fieldStatistics); - + maxDoc = in.readVInt(); } - + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -146,7 +145,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes writeFieldStats(out, fieldStatistics); out.writeVInt(maxDoc); } - + public static void writeFieldStats(StreamOutput out, ObjectObjectHashMap fieldStatistics) throws IOException { out.writeVInt(fieldStatistics.size()); @@ -160,20 +159,20 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes out.writeVLong(addOne(statistics.sumDocFreq())); } } - + public static void writeTermStats(StreamOutput out, TermStatistics[] termStatistics) throws IOException { out.writeVInt(termStatistics.length); for (TermStatistics termStatistic : termStatistics) { writeSingleTermStats(out, termStatistic); } } - + public static void writeSingleTermStats(StreamOutput out, TermStatistics termStatistic) throws IOException { assert termStatistic.docFreq() >= 0; out.writeVLong(termStatistic.docFreq()); - out.writeVLong(addOne(termStatistic.totalTermFreq())); + out.writeVLong(addOne(termStatistic.totalTermFreq())); } - + public static ObjectObjectHashMap readFieldStats(StreamInput in) throws IOException { return readFieldStats(in, null); } @@ -215,7 +214,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes return termStatistics; } - + /* * optional statistics are set to -1 in lucene by default. * Since we are using var longs to encode values we add one to each value @@ -225,8 +224,8 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes assert value + 1 >= 0; return value + 1; } - - + + /* * See #addOne this just subtracting one and asserts that the actual value * is positive. 
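The addOne/subOne pair above encodes a small wire-format convention: Lucene marks optional statistics as -1, and since variable-length longs can only hold non-negative values, every statistic is shifted up by one on write and back down on read. Below is a minimal standalone sketch of that round-trip; the class name is invented for illustration, addOne mirrors the body shown above, and subOne is reconstructed from the comment describing it.

public final class OptionalStatEncoding {

    private OptionalStatEncoding() {
        // utility holder, no instances
    }

    // Optional statistics default to -1 in Lucene. Var longs cannot encode
    // negative values, so shift every statistic up by one before writing.
    static long addOne(long value) {
        assert value + 1 >= 0;
        return value + 1;
    }

    // Inverse of addOne: subtract one, asserting that the encoded value is
    // non-negative, i.e. that it really came from addOne.
    static long subOne(long value) {
        assert value >= 0;
        return value - 1;
    }

    public static void main(String[] args) {
        long missing = -1L;               // Lucene's "statistic not available" marker
        long onWire = addOne(missing);    // 0, safe for StreamOutput#writeVLong
        long decoded = subOne(onWire);    // -1 again after StreamInput#readVLong
        System.out.println("encoded=" + onWire + ", decoded=" + decoded);
    }
}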
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 227141e4ddf..5019bab9128 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; import org.elasticsearch.ExceptionsHelper; @@ -31,7 +32,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.text.StringAndBytesText; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -55,7 +55,13 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder; @@ -198,7 +204,7 @@ public class FetchPhase implements SearchPhase { DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { - typeText = new StringAndBytesText(fieldsVisitor.uid().type()); + typeText = new Text(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); } @@ -309,11 +315,12 @@ public class FetchPhase implements SearchPhase { continue; } final Weight childWeight = context.searcher().createNormalizedWeight(childFilter, false); - DocIdSetIterator childIter = childWeight.scorer(subReaderContext); - if (childIter == null) { + Scorer childScorer = childWeight.scorer(subReaderContext); + if (childScorer == null) { current = nestedParentObjectMapper; continue; } + DocIdSetIterator childIter = childScorer.iterator(); BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FieldsParseElement.java b/core/src/main/java/org/elasticsearch/search/fetch/FieldsParseElement.java index 140d60d4eeb..74a37c7e8be 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FieldsParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FieldsParseElement.java @@ -20,9 +20,7 @@ package org.elasticsearch.search.fetch; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.SearchParseElement; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.internal.SearchContext; /** diff --git a/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java b/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java index 7bcfc045ca9..f3271f933fe 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java 
+++ b/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResultProvider; -import org.elasticsearch.transport.TransportResponse; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java index 803ab737cbc..0d524ed3e3e 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java @@ -22,9 +22,7 @@ package org.elasticsearch.search.fetch; import com.carrotsearch.hppc.IntArrayList; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.action.search.type.ParsedScrollId; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java index c74ef7b0c73..16b1311809c 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java @@ -94,7 +94,7 @@ public class FieldDataFieldsFetchSubPhase implements FetchSubPhase { hitField = new InternalSearchHitField(field.name(), new ArrayList<>(2)); hitContext.hit().fields().put(field.name(), hitField); } - MappedFieldType fieldType = context.mapperService().smartNameFieldType(field.name()); + MappedFieldType fieldType = context.mapperService().fullName(field.name()); if (fieldType != null) { AtomicFieldData data = context.fieldData().getForField(fieldType).load(hitContext.readerContext()); ScriptDocValues values = data.getScriptValues(); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsParseElement.java b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsParseElement.java index 3d1a2498f2f..4f746c47ec6 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsParseElement.java @@ -19,9 +19,7 @@ package org.elasticsearch.search.fetch.fielddata; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.FetchSubPhaseContext; import org.elasticsearch.search.fetch.FetchSubPhaseParseElement; import org.elasticsearch.search.internal.SearchContext; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java index e1884e36609..125563cd098 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java @@ -23,7 +23,19 @@ import 
org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopDocsCollector; +import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; import org.elasticsearch.ExceptionsHelper; @@ -209,10 +221,11 @@ public final class InnerHitsContext { return null; } - final DocIdSetIterator childrenIterator = childWeight.scorer(context); - if (childrenIterator == null) { + final Scorer childrenScorer = childWeight.scorer(context); + if (childrenScorer == null) { return null; } + DocIdSetIterator childrenIterator = childrenScorer.iterator(); final DocIdSetIterator it = new DocIdSetIterator() { int doc = -1; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java index de5294f690f..6adb01ad10c 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java @@ -90,9 +90,9 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase { if (scorer == null) { continue; } - final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator(); + final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); if (twoPhase == null) { - if (scorer.advance(hitContext.docId()) == hitContext.docId()) { + if (scorer.iterator().advance(hitContext.docId()) == hitContext.docId()) { matchedQueries.add(name); } } else { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsParseElement.java b/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsParseElement.java index 6dbdcbd589a..de1703b5c98 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsParseElement.java @@ -30,6 +30,7 @@ import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.internal.SearchContext; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -97,9 +98,9 @@ public class ScriptFieldsParseElement implements SearchParseElement { throw new SearchParseException(context, "must specify a script in script fields", parser.getTokenLocation()); } - SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH); + SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); context.scriptFields().add(new ScriptFieldsContext.ScriptField(fieldName, searchScript, ignoreException)); } } } -} \ No 
newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java index 9e86edef47d..8ad24b5cb19 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java @@ -82,7 +82,7 @@ public final class CustomQueryScorer extends QueryScorer { } else if (query instanceof FiltersFunctionScoreQuery) { query = ((FiltersFunctionScoreQuery) query).getSubQuery(); extract(query, query.getBoost(), terms); - } else { + } else if (terms.isEmpty()) { extractWeightedTerms(terms, query, query.getBoost()); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java index 65702dd24b5..51c56e676c1 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.vectorhighlight.SimpleFieldFragList; import org.apache.lucene.search.vectorhighlight.SimpleFragListBuilder; import org.apache.lucene.search.vectorhighlight.SingleFragListBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -151,15 +151,15 @@ public class FastVectorHighlighter implements Highlighter { // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible // Only send matched fields if they were requested to save time. 
if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), field.fieldOptions().matchedFields(), fragmentCharSize, + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } else { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), fragmentCharSize, + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } if (fragments != null && fragments.length > 0) { - return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); + return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize(); @@ -167,10 +167,10 @@ public class FastVectorHighlighter implements Highlighter { // Essentially we just request that a fragment is built from 0 to noMatchSize using the normal fragmentsBuilder FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/); fieldFragList.add(0, noMatchSize, Collections.emptyList()); - fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), + fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), fieldFragList, 1, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); if (fragments != null && fragments.length > 0) { - return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); + return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java index 9077278d515..30530b697f3 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.highlight; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import java.io.IOException; @@ -90,7 +89,7 @@ public class HighlightField implements Streamable { if (in.readBoolean()) { int size = in.readVInt(); if (size == 0) { - fragments = StringText.EMPTY_ARRAY; + fragments = Text.EMPTY_ARRAY; } else { fragments = new Text[size]; for (int i = 0; i < size; i++) { diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java index db64af82398..d4095c14ec0 100644 --- 
a/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java @@ -48,9 +48,9 @@ public final class HighlightUtils { boolean forceSource = searchContext.highlight().forceSource(field); List textsToHighlight; if (!forceSource && mapper.fieldType().stored()) { - CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(mapper.fieldType().names().indexName()), false); + CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(mapper.fieldType().name()), false); hitContext.reader().document(hitContext.docId(), fieldVisitor); - textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().names().indexName()); + textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().name()); if (textsToHighlight == null) { // Can happen if the document doesn't have the field to highlight textsToHighlight = Collections.emptyList(); @@ -58,7 +58,7 @@ public final class HighlightUtils { } else { SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId()); - textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); + textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().name())); } assert textsToHighlight != null; return textsToHighlight; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java b/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java index 54366bee8c9..30b8d15d93d 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java @@ -22,7 +22,11 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; /** * An extensions point and registry for all the highlighters a node supports. diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java index 041ed754d76..4bd27e11795 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java @@ -33,9 +33,7 @@ import org.apache.lucene.search.highlight.SimpleSpanFragmenter; import org.apache.lucene.search.highlight.TextFragment; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; @@ -74,7 +72,7 @@ public class PlainHighlighter implements Highlighter { org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper); if (entry == null) { - QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.fieldType().names().indexName() : null); + QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? 
mapper.fieldType().name() : null); queryScorer.setExpandMultiTermQuery(true); Fragmenter fragmenter; if (field.fieldOptions().numberOfFragments() == 0) { @@ -110,7 +108,7 @@ public class PlainHighlighter implements Highlighter { for (Object textToHighlight : textsToHighlight) { String text = textToHighlight.toString(); - try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().names().indexName(), text)) { + try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().name(), text)) { if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { // can't perform highlighting if the stream has no terms (binary token stream) or no offsets continue; @@ -158,7 +156,7 @@ public class PlainHighlighter implements Highlighter { } if (fragments.length > 0) { - return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); + return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize(); @@ -167,12 +165,12 @@ public class PlainHighlighter implements Highlighter { String fieldContents = textsToHighlight.get(0).toString(); int end; try { - end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mapper.fieldType().names().indexName(), fieldContents); + end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mapper.fieldType().name(), fieldContents); } catch (Exception e) { throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } if (end > 0) { - return new HighlightField(highlighterContext.fieldName, new Text[] { new StringText(fieldContents.substring(0, end)) }); + return new HighlightField(highlighterContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) }); } } return null; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java index e11840e89e7..51c460c5c68 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java @@ -28,7 +28,7 @@ import org.apache.lucene.search.postingshighlight.CustomSeparatorBreakIterator; import org.apache.lucene.search.postingshighlight.Snippet; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -93,7 +93,7 @@ public class PostingsHighlighter implements Highlighter { } IndexSearcher searcher = new IndexSearcher(hitContext.reader()); - Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().names().indexName(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); + Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().name(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); for (Snippet fieldSnippet : fieldSnippets) { if (Strings.hasText(fieldSnippet.getText())) { snippets.add(fieldSnippet); @@ -122,7 +122,7 @@ public class PostingsHighlighter implements Highlighter { } if (fragments.length > 0) { - return new 
HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); + return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } return null; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java index ee824ee13c3..363a3b9137d 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java @@ -27,7 +27,14 @@ import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo.S import org.apache.lucene.search.vectorhighlight.FragmentsBuilder; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.Version; -import org.elasticsearch.index.analysis.*; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.EdgeNGramTokenFilterFactory; +import org.elasticsearch.index.analysis.EdgeNGramTokenizerFactory; +import org.elasticsearch.index.analysis.NGramTokenFilterFactory; +import org.elasticsearch.index.analysis.NGramTokenizerFactory; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.WordDelimiterTokenFilterFactory; import org.elasticsearch.index.mapper.FieldMapper; import java.util.Comparator; @@ -41,14 +48,14 @@ public final class FragmentBuilderHelper { private FragmentBuilderHelper() { // no instance } - + /** * Fixes problems with broken analysis chains if positions and offsets are messed up that can lead to * {@link StringIndexOutOfBoundsException} in the {@link FastVectorHighlighter} */ public static WeightedFragInfo fixWeightedFragInfo(FieldMapper mapper, Field[] values, WeightedFragInfo fragInfo) { assert fragInfo != null : "FragInfo must not be null"; - assert mapper.fieldType().names().indexName().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name(); + assert mapper.fieldType().name().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name(); if (!fragInfo.getSubInfos().isEmpty() && (containsBrokenAnalysis(mapper.fieldType().indexAnalyzer()))) { /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time * which can potentially mess up the offsets. To prevent a SAIIOBException we need to resort @@ -70,7 +77,7 @@ public final class FragmentBuilderHelper { return fragInfo; } } - + private static int compare(int x, int y) { return (x < y) ? -1 : ((x == y) ? 
0 : 1); } @@ -82,19 +89,19 @@ public final class FragmentBuilderHelper { } if (analyzer instanceof CustomAnalyzer) { final CustomAnalyzer a = (CustomAnalyzer) analyzer; - if (a.tokenizerFactory() instanceof EdgeNGramTokenizerFactory - || (a.tokenizerFactory() instanceof NGramTokenizerFactory + if (a.tokenizerFactory() instanceof EdgeNGramTokenizerFactory + || (a.tokenizerFactory() instanceof NGramTokenizerFactory && !((NGramTokenizerFactory)a.tokenizerFactory()).version().onOrAfter(Version.LUCENE_4_2))) { // ngram tokenizer is broken before 4.2 return true; } TokenFilterFactory[] tokenFilters = a.tokenFilters(); for (TokenFilterFactory tokenFilterFactory : tokenFilters) { - if (tokenFilterFactory instanceof WordDelimiterTokenFilterFactory + if (tokenFilterFactory instanceof WordDelimiterTokenFilterFactory || tokenFilterFactory instanceof EdgeNGramTokenFilterFactory) { return true; } - if (tokenFilterFactory instanceof NGramTokenFilterFactory + if (tokenFilterFactory instanceof NGramTokenFilterFactory && !((NGramTokenFilterFactory)tokenFilterFactory).version().onOrAfter(Version.LUCENE_4_2)) { // ngram token filter is broken before 4.2 return true; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java index 71393fdf190..2d226aabf94 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java @@ -20,8 +20,8 @@ package org.elasticsearch.search.highlight.vectorhighlight; import org.apache.lucene.document.Field; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo; @@ -29,7 +29,6 @@ import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -60,14 +59,14 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); - List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); + List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().name())); Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(mapper.fieldType().names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } - + @Override protected String makeFragment( StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo, String[] preTags, String[] postTags, Encoder encoder ){ diff --git 
a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java index 7a12b449c97..b80c239376e 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java @@ -20,13 +20,12 @@ package org.elasticsearch.search.highlight.vectorhighlight; import org.apache.lucene.document.Field; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -56,15 +55,15 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder { SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); - List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); + List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().name())); if (values.isEmpty()) { return EMPTY_FIELDS; } Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(mapper.fieldType().names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } - + } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 0a9b860edb7..c4df30716c7 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -22,10 +22,20 @@ package org.elasticsearch.search.internal; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; -import org.apache.lucene.search.*; +import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCache; +import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.TermStatistics; +import org.apache.lucene.search.Weight; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.dfs.AggregatedDfs; +import org.elasticsearch.search.profile.ProfileBreakdown; +import org.elasticsearch.search.profile.ProfileWeight; +import org.elasticsearch.search.profile.Profiler; import java.io.IOException; @@ -43,26 +53,44 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private final Engine.Searcher engineSearcher; - public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) { + // TODO revisit 
moving the profiler to inheritance or wrapping model in the future + private Profiler profiler; + + public ContextIndexSearcher(Engine.Searcher searcher, + QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) { super(searcher.reader()); in = searcher.searcher(); engineSearcher = searcher; setSimilarity(searcher.searcher().getSimilarity(true)); - setQueryCache(searchContext.getQueryCache()); - setQueryCachingPolicy(searchContext.indexShard().getQueryCachingPolicy()); + setQueryCache(queryCache); + setQueryCachingPolicy(queryCachingPolicy); } @Override public void close() { } + public void setProfiler(Profiler profiler) { + this.profiler = profiler; + } + public void setAggregatedDfs(AggregatedDfs aggregatedDfs) { this.aggregatedDfs = aggregatedDfs; } @Override public Query rewrite(Query original) throws IOException { - return in.rewrite(original); + if (profiler != null) { + profiler.startRewriteTime(); + } + + try { + return in.rewrite(original); + } finally { + if (profiler != null) { + profiler.stopAndAddRewriteTime(); + } + } } @Override @@ -72,8 +100,34 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { if (aggregatedDfs != null && needsScores) { // if scores are needed and we have dfs data then use it return super.createNormalizedWeight(query, needsScores); + } else if (profiler != null) { + // we need to use the createWeight method to insert the wrappers + return super.createNormalizedWeight(query, needsScores); + } else { + return in.createNormalizedWeight(query, needsScores); + } + } + + @Override + public Weight createWeight(Query query, boolean needsScores) throws IOException { + if (profiler != null) { + // createWeight() is called for each query in the tree, so we tell the queryProfiler + // each invocation so that it can build an internal representation of the query + // tree + ProfileBreakdown profile = profiler.getQueryBreakdown(query); + profile.startTime(ProfileBreakdown.TimingType.CREATE_WEIGHT); + final Weight weight; + try { + weight = super.createWeight(query, needsScores); + } finally { + profile.stopAndRecordTime(); + profiler.pollLastQuery(); + } + return new ProfileWeight(query, weight, profile); + } else { + // needs to be 'super', not 'in' in order to use aggregated DFS + return super.createWeight(query, needsScores); } - return in.createNormalizedWeight(query, needsScores); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 1174fcdd8a9..c3eef7559a0 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -20,7 +20,12 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -35,7 +40,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.AnalysisService; import 
org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; @@ -57,13 +61,18 @@ import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QueryPhaseExecutionException; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * @@ -129,10 +138,10 @@ public class DefaultSearchContext extends SearchContext { private List rescore; private SearchLookup searchLookup; private volatile long keepAlive; - private ScoreDoc lastEmittedDoc; private final long originNanoTime = System.nanoTime(); private volatile long lastAccessTime = -1; private InnerHitsContext innerHitsContext; + private Profilers profilers; private final Map subPhaseContexts = new HashMap<>(); private final Map, Collector> queryCollectors = new HashMap<>(); @@ -158,7 +167,7 @@ public class DefaultSearchContext extends SearchContext { this.fetchResult = new FetchSearchResult(id, shardTarget); this.indexShard = indexShard; this.indexService = indexService; - this.searcher = new ContextIndexSearcher(this, engineSearcher); + this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy()); this.timeEstimateCounter = timeEstimateCounter; this.timeoutInMillis = timeout.millis(); } @@ -690,17 +699,12 @@ public class DefaultSearchContext extends SearchContext { @Override public MappedFieldType smartNameFieldType(String name) { - return mapperService().smartNameFieldType(name, request.types()); - } - - @Override - public MappedFieldType smartNameFieldTypeFromAnyType(String name) { - return mapperService().smartNameFieldType(name); + return mapperService().fullName(name); } @Override public ObjectMapper getObjectMapper(String name) { - return mapperService().getObjectMapper(name, request.types()); + return mapperService().getObjectMapper(name); } @Override @@ -724,5 +728,11 @@ public class DefaultSearchContext extends SearchContext { } @Override - public QueryCache getQueryCache() { return indexService.cache().query();} + public Profilers getProfilers() { + return profilers; + } + + public void setProfilers(Profilers profilers) { + this.profilers = profilers; + } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 7225c7b32bd..eaa14933b33 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import 
org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -49,6 +48,7 @@ import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -496,11 +496,6 @@ public abstract class FilteredSearchContext extends SearchContext { return in.smartNameFieldType(name); } - @Override - public MappedFieldType smartNameFieldTypeFromAnyType(String name) { - return in.smartNameFieldTypeFromAnyType(name); - } - @Override public ObjectMapper getObjectMapper(String name) { return in.getObjectMapper(name); @@ -517,8 +512,11 @@ public abstract class FilteredSearchContext extends SearchContext { } @Override - public Map, Collector> queryCollectors() { return in.queryCollectors();} + public Profilers getProfilers() { + return in.getProfilers(); + } @Override - public QueryCache getQueryCache() { return in.getQueryCache();} + public Map, Collector> queryCollectors() { return in.queryCollectors();} + } diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index 96fd103fa6f..c6afe325bb3 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.text.StringAndBytesText; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -104,14 +103,14 @@ public class InternalSearchHit implements SearchHit { public InternalSearchHit(int docId, String id, Text type, Map fields) { this.docId = docId; - this.id = new StringAndBytesText(id); + this.id = new Text(id); this.type = type; this.fields = fields; } public InternalSearchHit(int nestedTopDocId, String id, Text type, InternalNestedIdentity nestedIdentity, Map fields) { this.docId = nestedTopDocId; - this.id = new StringAndBytesText(id); + this.id = new Text(id); this.type = type; this.nestedIdentity = nestedIdentity; this.fields = fields; @@ -339,7 +338,7 @@ public class InternalSearchHit implements SearchHit { if (sortValues != null) { for (int i = 0; i < sortValues.length; i++) { if (sortValues[i] instanceof BytesRef) { - sortValuesCopy[i] = new StringAndBytesText(new BytesArray((BytesRef) sortValues[i])); + sortValuesCopy[i] = new Text(new BytesArray((BytesRef) sortValues[i])); } } } @@ -783,7 +782,7 @@ public class InternalSearchHit implements SearchHit { private InternalNestedIdentity child; public InternalNestedIdentity(String field, int offset, InternalNestedIdentity child) { - this.field = new StringAndBytesText(field); + this.field = new Text(field); this.offset = offset; this.child = child; } @@ 
-845,4 +844,4 @@ public class InternalSearchHit implements SearchHit { } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java index 7b73772f9da..b8255e0bb52 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java @@ -28,9 +28,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.profile.InternalProfileShardResults; +import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.search.internal.InternalSearchHits.readSearchHits; @@ -40,7 +45,7 @@ import static org.elasticsearch.search.internal.InternalSearchHits.readSearchHit public class InternalSearchResponse implements Streamable, ToXContent { public static InternalSearchResponse empty() { - return new InternalSearchResponse(InternalSearchHits.empty(), null, null, false, null); + return new InternalSearchResponse(InternalSearchHits.empty(), null, null, null, false, null); } private InternalSearchHits hits; @@ -49,6 +54,8 @@ public class InternalSearchResponse implements Streamable, ToXContent { private Suggest suggest; + private InternalProfileShardResults profileResults; + private boolean timedOut; private Boolean terminatedEarly = null; @@ -56,10 +63,12 @@ public class InternalSearchResponse implements Streamable, ToXContent { private InternalSearchResponse() { } - public InternalSearchResponse(InternalSearchHits hits, InternalAggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly) { + public InternalSearchResponse(InternalSearchHits hits, InternalAggregations aggregations, Suggest suggest, + InternalProfileShardResults profileResults, boolean timedOut, Boolean terminatedEarly) { this.hits = hits; this.aggregations = aggregations; this.suggest = suggest; + this.profileResults = profileResults; this.timedOut = timedOut; this.terminatedEarly = terminatedEarly; } @@ -84,6 +93,19 @@ public class InternalSearchResponse implements Streamable, ToXContent { return suggest; } + /** + * Returns the profile results for this search response (including all shards). 
+ * An empty map is returned if profiling was not enabled + * + * @return Profile results + */ + public Map<String, List<ProfileShardResult>> profile() { + if (profileResults == null) { + return Collections.emptyMap(); + } + return profileResults.getShardResults(); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { hits.toXContent(builder, params); @@ -93,6 +115,9 @@ public class InternalSearchResponse implements Streamable, ToXContent { if (suggest != null) { suggest.toXContent(builder, params); } + if (profileResults != null) { + profileResults.toXContent(builder, params); + } return builder; } @@ -114,6 +139,12 @@ public class InternalSearchResponse implements Streamable, ToXContent { timedOut = in.readBoolean(); terminatedEarly = in.readOptionalBoolean(); + + if (in.getVersion().onOrAfter(Version.V_2_2_0) && in.readBoolean()) { + profileResults = new InternalProfileShardResults(in); + } else { + profileResults = null; + } } @Override @@ -134,5 +165,14 @@ public class InternalSearchResponse implements Streamable, ToXContent { out.writeBoolean(timedOut); out.writeOptionalBoolean(terminatedEarly); + + if (out.getVersion().onOrAfter(Version.V_2_2_0)) { + if (profileResults == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + profileResults.writeTo(out); + } + } } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 0f61b2bc6a3..76164b5c0f8 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -56,11 +55,15 @@ import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; public abstract class SearchContext extends DelegatingHasContextAndHeaders implements Releasable { @@ -303,6 +306,11 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple public abstract FetchSearchResult fetchResult(); + /** + * Return a handle over the profilers for the current search request, or {@code null} if profiling is not enabled. + */ + public abstract Profilers getProfilers(); + /** + * Schedule the release of a resource. The time when {@link Releasable#close()} will be called on this object + * is a function of the provided {@link Lifetime}.
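For orientation, this is roughly how the new plumbing is consumed end to end. A minimal sketch, assuming the `setProfile(...)` request-builder method and `SearchResponse.getProfileResults()` accessor introduced in parts of this change set not shown in this excerpt:

```java
// Sketch only: enable profiling on a request and read back the per-shard results.
// setProfile(...) and getProfileResults() are assumed from other files in this PR;
// `client` is an ordinary Client.
public Map<String, List<ProfileShardResult>> profileSearch(Client client) {
    SearchResponse response = client.prepareSearch("index")
            .setQuery(QueryBuilders.matchQuery("field", "some value"))
            .setProfile(true) // carried to each shard via ShardSearchRequest.setProfile below
            .get();
    return response.getProfileResults(); // empty map when profiling is disabled
}
```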
@@ -335,12 +343,10 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple } } - public abstract MappedFieldType smartNameFieldType(String name); - /** * Looks up the given field, but does not restrict to fields in the types set on this context. */ - public abstract MappedFieldType smartNameFieldTypeFromAnyType(String name); + public abstract MappedFieldType smartNameFieldType(String name); public abstract ObjectMapper getObjectMapper(String name); @@ -367,5 +373,4 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple CONTEXT } - public abstract QueryCache getQueryCache(); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java index 47791aeddfa..9d15dfd5790 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java @@ -71,6 +71,8 @@ public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements S private Boolean requestCache; private long nowInMillis; + private boolean profile; + ShardSearchLocalRequest() { } @@ -165,6 +167,16 @@ public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements S return scroll; } + @Override + public void setProfile(boolean profile) { + this.profile = profile; + } + + @Override + public boolean isProfile() { + return profile; + } + @SuppressWarnings("unchecked") protected void innerReadFrom(StreamInput in) throws IOException { index = in.readString(); diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index fb631b08270..b1730b6a14e 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -59,6 +59,17 @@ public interface ShardSearchRequest extends HasContextAndHeaders { Scroll scroll(); + /** + * Sets if this shard search needs to be profiled or not + * @param profile True if the shard should be profiled + */ + void setProfile(boolean profile); + + /** + * Returns true if this shard search is being profiled or not + */ + boolean isProfile(); + /** * Returns the cache key for this shard search request, based on its content */ diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java index 279d9d6bd20..0f9c0ced411 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java @@ -150,4 +150,14 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha public BytesReference cacheKey() throws IOException { return shardSearchLocalRequest.cacheKey(); } + + @Override + public void setProfile(boolean profile) { + shardSearchLocalRequest.setProfile(profile); + } + + @Override + public boolean isProfile() { + return shardSearchLocalRequest.isProfile(); + } } diff --git a/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java b/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java index 07f72379764..249a23b9bfc 100644 --- a/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java +++ 
b/core/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.search.lookup; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import java.util.ArrayList; @@ -86,7 +85,7 @@ public class FieldLookup { } valueLoaded = true; value = null; - List values = fields.get(fieldType.names().indexName()); + List values = fields.get(fieldType.name()); return values != null ? value = values.get(0) : null; } @@ -96,6 +95,6 @@ public class FieldLookup { } valuesLoaded = true; values.clear(); - return values = fields().get(fieldType.names().indexName()); + return values = fields().get(fieldType.name()); } } diff --git a/core/src/main/java/org/elasticsearch/search/lookup/IndexField.java b/core/src/main/java/org/elasticsearch/search/lookup/IndexField.java index 490180e6925..21803983d29 100644 --- a/core/src/main/java/org/elasticsearch/search/lookup/IndexField.java +++ b/core/src/main/java/org/elasticsearch/search/lookup/IndexField.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.lookup; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.search.CollectionStatistics; import org.elasticsearch.common.util.MinimalMap; @@ -93,7 +92,7 @@ public class IndexField extends MinimalMap { /* * Returns a TermInfo object that can be used to access information on * specific terms. flags can be set as described in TermInfo. - * + * * TODO: here might be potential for running time improvement? If we knew in * advance which terms are requested, we could provide an array which the * user could then iterate over. diff --git a/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java b/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java index 3864581034e..db20a03f825 100644 --- a/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java +++ b/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java @@ -75,7 +75,7 @@ public class LeafDocLookup implements Map { String fieldName = key.toString(); ScriptDocValues scriptValues = localCacheFieldData.get(fieldName); if (scriptValues == null) { - final MappedFieldType fieldType = mapperService.smartNameFieldType(fieldName, types); + final MappedFieldType fieldType = mapperService.fullName(fieldName); if (fieldType == null) { throw new IllegalArgumentException("No field found for [" + fieldName + "] in mapping with types " + Arrays.toString(types) + ""); } @@ -99,7 +99,7 @@ public class LeafDocLookup implements Map { String fieldName = key.toString(); ScriptDocValues scriptValues = localCacheFieldData.get(fieldName); if (scriptValues == null) { - MappedFieldType fieldType = mapperService.smartNameFieldType(fieldName, types); + MappedFieldType fieldType = mapperService.fullName(fieldName); if (fieldType == null) { return false; } diff --git a/core/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java b/core/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java index e5295e80fb3..a5f90aa2c90 100644 --- a/core/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java +++ b/core/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java @@ -136,7 +136,7 @@ public class LeafFieldsLookup implements Map { private FieldLookup loadFieldData(String name) { FieldLookup data = cachedFieldData.get(name); if (data == null) { - MappedFieldType fieldType = mapperService.smartNameFieldType(name, types); + MappedFieldType fieldType = mapperService.fullName(name); if 
(fieldType == null) { throw new IllegalArgumentException("No field found for [" + name + "] in mapping with types " + Arrays.toString(types) + ""); } @@ -144,12 +144,12 @@ public class LeafFieldsLookup implements Map { cachedFieldData.put(name, data); } if (data.fields() == null) { - String fieldName = data.fieldType().names().indexName(); + String fieldName = data.fieldType().name(); fieldVisitor.reset(fieldName); try { reader.document(docId, fieldVisitor); fieldVisitor.postProcess(data.fieldType()); - data.fields(singletonMap(name, fieldVisitor.fields().get(data.fieldType().names().indexName()))); + data.fields(singletonMap(name, fieldVisitor.fields().get(data.fieldType().name()))); } catch (IOException e) { throw new ElasticsearchParseException("failed to load field [{}]", e, name); } diff --git a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java new file mode 100644 index 00000000000..8da14d23d96 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; + +/** + * Public interface and serialization container for profiled timings of the + * Collectors used in the search. 
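One note on the lookup hunks above, before the new profile classes: `smartNameFieldType(name, types)` is replaced everywhere with the type-agnostic full-name lookup. The changed call in isolation (the field name here is hypothetical):

```java
// Resolve a mapped field by its full name; mapping types no longer filter the lookup.
MappedFieldType fieldType = mapperService.fullName("user.name"); // hypothetical field
if (fieldType == null) {
    throw new IllegalArgumentException("No field found for [user.name]");
}
String stored = fieldType.name(); // replaces fieldType.names().indexName()
```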
Children CollectorResults may be + * embedded inside a parent CollectorResult + */ +public class CollectorResult implements ToXContent, Writeable<CollectorResult> { + + public static final String REASON_SEARCH_COUNT = "search_count"; + public static final String REASON_SEARCH_TOP_HITS = "search_top_hits"; + public static final String REASON_SEARCH_TERMINATE_AFTER_COUNT = "search_terminate_after_count"; + public static final String REASON_SEARCH_POST_FILTER = "search_post_filter"; + public static final String REASON_SEARCH_MIN_SCORE = "search_min_score"; + public static final String REASON_SEARCH_MULTI = "search_multi"; + public static final String REASON_SEARCH_TIMEOUT = "search_timeout"; + public static final String REASON_AGGREGATION = "aggregation"; + public static final String REASON_AGGREGATION_GLOBAL = "aggregation_global"; + + private static final ParseField NAME = new ParseField("name"); + private static final ParseField REASON = new ParseField("reason"); + private static final ParseField TIME = new ParseField("time"); + private static final ParseField CHILDREN = new ParseField("children"); + + /** + * A more friendly representation of the Collector's class name + */ + private final String collectorName; + + /** + * A "hint" to help provide some context about this Collector + */ + private final String reason; + + /** + * The total elapsed time for this Collector + */ + private final Long time; + + /** + * A list of children collectors "embedded" inside this collector + */ + private List<CollectorResult> children; + + public CollectorResult(String collectorName, String reason, Long time, List<CollectorResult> children) { + this.collectorName = collectorName; + this.reason = reason; + this.time = time; + this.children = children; + } + + public CollectorResult(StreamInput in) throws IOException { + this.collectorName = in.readString(); + this.reason = in.readString(); + this.time = in.readLong(); + int size = in.readVInt(); + this.children = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + CollectorResult child = new CollectorResult(in); + this.children.add(child); + } + } + + /** + * @return the profiled time for this collector (inclusive of children) + */ + public long getTime() { + return this.time; + } + + /** + * @return a human readable "hint" about what this collector was used for + */ + public String getReason() { + return this.reason; + } + + /** + * @return the lucene class name of the collector + */ + public String getName() { + return this.collectorName; + } + + /** + * @return a list of children collectors + */ + public List<CollectorResult> getProfiledChildren() { + return children; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder = builder.startObject() + .field(NAME.getPreferredName(), getName()) + .field(REASON.getPreferredName(), getReason()) + .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double) (getTime() / 1000000.0))); + + if (!children.isEmpty()) { + builder = builder.startArray(CHILDREN.getPreferredName()); + for (CollectorResult child : children) { + builder = child.toXContent(builder, params); + } + builder = builder.endArray(); + } + builder = builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(collectorName); + out.writeString(reason); + out.writeLong(time); + out.writeVInt(children.size()); + for (CollectorResult child : children) { + child.writeTo(out); + } + } + + @Override + public CollectorResult readFrom(StreamInput in)
throws IOException { + return new CollectorResult(in); + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileCollector.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileCollector.java new file mode 100644 index 00000000000..33d11a1aadc --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileCollector.java @@ -0,0 +1,135 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.LeafCollector; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * This class wraps a Lucene Collector and times the execution of: + * - setScorer() + * - collect() + * - doSetNextReader() + * - needsScores() + * + * InternalProfiler facilitates the linking of the Collector graph + */ +public class InternalProfileCollector implements Collector { + + /** + * A more friendly representation of the Collector's class name + */ + private final String collectorName; + + /** + * A "hint" to help provide some context about this Collector + */ + private final String reason; + + /** The wrapped collector */ + private final ProfileCollector collector; + + /** + * A list of "embedded" children collectors + */ + private final List<InternalProfileCollector> children; + + public InternalProfileCollector(Collector collector, String reason, List<InternalProfileCollector> children) { + this.collector = new ProfileCollector(collector); + this.reason = reason; + this.collectorName = deriveCollectorName(collector); + this.children = children; + } + + /** + * @return the profiled time for this collector (inclusive of children) + */ + public long getTime() { + return collector.getTime(); + } + + /** + * @return a human readable "hint" about what this collector was used for + */ + public String getReason() { + return this.reason; + } + + /** + * @return the lucene class name of the collector + */ + public String getName() { + return this.collectorName; + } + + /** + * Creates a human-friendly representation of the Collector name. + * + * InternalBucket Collectors use the aggregation name in their toString() method, + * which makes the profiled output a bit nicer.
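To make the parent/child bookkeeping concrete, a hand-wired sketch of assembling a profiled collector tree; the search-phase code that actually performs this wiring is outside this excerpt:

```java
// Sketch: profile a top-hits collector and a count collector under a MultiCollector.
CollectorResult profiledSearch(IndexSearcher searcher, Query query) throws IOException {
    InternalProfileCollector topHits = new InternalProfileCollector(
            TopScoreDocCollector.create(10), CollectorResult.REASON_SEARCH_TOP_HITS,
            Collections.emptyList());
    InternalProfileCollector count = new InternalProfileCollector(
            new TotalHitCountCollector(), CollectorResult.REASON_SEARCH_COUNT,
            Collections.emptyList());
    InternalProfileCollector root = new InternalProfileCollector(
            MultiCollector.wrap(topHits, count), CollectorResult.REASON_SEARCH_MULTI,
            Arrays.asList(topHits, count));
    searcher.search(query, root);   // timings accumulate during collection
    return root.getCollectorTree(); // REASON_SEARCH_MULTI node with two children
}
```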
+ * + * @param c The Collector to derive a name from + * @return A (hopefully) prettier name + */ + private String deriveCollectorName(Collector c) { + String s = c.getClass().getSimpleName(); + + // MultiCollector, which wraps multiple BucketCollectors, is generated + // via an anonymous class, so this corrects the lack of a name by + // asking the enclosing class + if (s.equals("")) { + s = c.getClass().getEnclosingClass().getSimpleName(); + } + + // Aggregation collector toString()'s include the user-defined agg name + if (reason.equals(CollectorResult.REASON_AGGREGATION) || reason.equals(CollectorResult.REASON_AGGREGATION_GLOBAL)) { + s += ": [" + c.toString() + "]"; + } + return s; + } + + @Override + public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { + return collector.getLeafCollector(context); + } + + @Override + public boolean needsScores() { + return collector.needsScores(); + } + + public CollectorResult getCollectorTree() { + return InternalProfileCollector.doGetCollectorTree(this); + } + + private static CollectorResult doGetCollectorTree(InternalProfileCollector collector) { + List<CollectorResult> childResults = new ArrayList<>(collector.children.size()); + for (InternalProfileCollector child : collector.children) { + CollectorResult result = doGetCollectorTree(child); + childResults.add(result); + } + return new CollectorResult(collector.getName(), collector.getReason(), collector.getTime(), childResults); + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java new file mode 100644 index 00000000000..fe94aea1a0a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileShardResults.java @@ -0,0 +1,113 @@ +package org.elasticsearch.search.profile; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * A container class to hold all the profile results across all shards.
Internally + * holds a map of shard ID -> Profiled results + */ +public final class InternalProfileShardResults implements Writeable, ToXContent{ + + private Map> shardResults; + + public InternalProfileShardResults(Map> shardResults) { + Map> transformed = + shardResults.entrySet() + .stream() + .collect(Collectors.toMap( + Map.Entry::getKey, + e -> Collections.unmodifiableList(e.getValue())) + ); + this.shardResults = Collections.unmodifiableMap(transformed); + } + + public InternalProfileShardResults(StreamInput in) throws IOException { + int size = in.readInt(); + shardResults = new HashMap<>(size); + + for (int i = 0; i < size; i++) { + String key = in.readString(); + int shardResultsSize = in.readInt(); + + List shardResult = new ArrayList<>(shardResultsSize); + + for (int j = 0; j < shardResultsSize; j++) { + ProfileShardResult result = new ProfileShardResult(in); + shardResult.add(result); + } + shardResults.put(key, Collections.unmodifiableList(shardResult)); + } + shardResults = Collections.unmodifiableMap(shardResults); + } + + public Map> getShardResults() { + return this.shardResults; + } + + @Override + public InternalProfileShardResults readFrom(StreamInput in) throws IOException { + return new InternalProfileShardResults(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInt(shardResults.size()); + for (Map.Entry> entry : shardResults.entrySet()) { + out.writeString(entry.getKey()); + out.writeInt(entry.getValue().size()); + + for (ProfileShardResult result : entry.getValue()) { + result.writeTo(out); + } + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("profile").startArray("shards"); + + for (Map.Entry> entry : shardResults.entrySet()) { + builder.startObject().field("id",entry.getKey()).startArray("searches"); + for (ProfileShardResult result : entry.getValue()) { + builder.startObject(); + result.toXContent(builder, params); + builder.endObject(); + } + builder.endArray().endObject(); + } + + builder.endArray().endObject(); + return builder; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java new file mode 100644 index 00000000000..d7afd9c2fa0 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/InternalProfileTree.java @@ -0,0 +1,239 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.search.Query; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Deque; +import java.util.List; +import java.util.Map; +import java.util.concurrent.LinkedBlockingDeque; + +/** + * This class tracks the dependency tree for queries (scoring and rewriting) and + * generates {@link ProfileBreakdown} for each node in the tree. It also finalizes the tree + * and returns a list of {@link ProfileResult} that can be serialized back to the client + */ +final class InternalProfileTree { + + private ArrayList<ProfileBreakdown> timings; + + /** Maps the Query to its list of children. This is basically the dependency tree */ + private ArrayList<ArrayList<Integer>> tree; + + /** A list of the original queries, keyed by index position */ + private ArrayList<Query> queries; + + /** A list of top-level "roots". Each root can have its own tree of profiles */ + private ArrayList<Integer> roots; + + /** Rewrite time */ + private long rewriteTime; + private long rewriteScratch; + + /** A temporary stack used to record where we are in the dependency tree. Only used by scoring queries */ + private Deque<Integer> stack; + + private int currentToken = 0; + + public InternalProfileTree() { + timings = new ArrayList<>(10); + stack = new LinkedBlockingDeque<>(10); + tree = new ArrayList<>(10); + queries = new ArrayList<>(10); + roots = new ArrayList<>(10); + } + + /** + * Returns a {@link ProfileBreakdown} for a scoring query. Scoring queries (e.g. those + * that are past the rewrite phase and are now being wrapped by createWeight()) follow + * a recursive progression. We can track the dependency tree by a simple stack + * + * The only hiccup is that the first scoring query will be identical to the last rewritten + * query, so we need to take special care to fix that + * + * @param query The scoring query we wish to profile + * @return A ProfileBreakdown for this query + */ + public ProfileBreakdown getQueryBreakdown(Query query) { + int token = currentToken; + + boolean stackEmpty = stack.isEmpty(); + + // If the stack is empty, we are a new root query + if (stackEmpty) { + + // We couldn't find a rewritten query to attach to, so just add it as a + // top-level root. This is just a precaution: it really shouldn't happen. + // We would only get here if a top-level query never rewrites for some reason. + roots.add(token); + + // Increment the token since we are adding a new node, but notably, do not + // updateParent() because this was added as a root + currentToken += 1; + stack.add(token); + + return addDependencyNode(query, token); + } + + updateParent(token); + + // Increment the token since we are adding a new node + currentToken += 1; + stack.add(token); + + return addDependencyNode(query, token); + } + + /** + * Begin timing the rewrite phase of a request + */ + public void startRewriteTime() { + assert rewriteScratch == 0; + rewriteScratch = System.nanoTime(); + } + + /** + * Halt the timing process and add the elapsed rewriting time. + * startRewriteTime() must be called for a particular context prior to calling + * stopAndAddRewriteTime(), otherwise the elapsed time will be negative and + * nonsensical + * + * @return The elapsed time + */ + public long stopAndAddRewriteTime() { + long time = Math.max(1, System.nanoTime() - rewriteScratch); + rewriteTime += time; + rewriteScratch = 0; + return time; + } + + /** + * Helper method to add a new node to the dependency tree.
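The stack bookkeeping above is easiest to see on a worked example; a sketch of how createWeight() recursion drives the tree for a boolean query with one term clause (the query objects are hypothetical):

```java
InternalProfileTree profileTree = new InternalProfileTree();

// createWeight(bool) starts: stack is empty, so bool becomes root token 0.
ProfileBreakdown boolTimings = profileTree.getQueryBreakdown(boolQuery); // roots=[0], stack=[0]

// bool recurses into its clause: term becomes token 1, recorded as a child of 0.
ProfileBreakdown termTimings = profileTree.getQueryBreakdown(termQuery); // tree[0]=[1], stack=[0,1]
profileTree.pollLast(); // term's weight is built; pop token 1

profileTree.pollLast(); // bool's weight is built; pop token 0
List<ProfileResult> results = profileTree.getQueryTree(); // single root mirroring bool -> term
```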
+ * + * Initializes a new list in the dependency tree, saves the query and + * generates a new {@link ProfileBreakdown} to track the timings + * of this query + * + * @param query The query to profile + * @param token The assigned token for this query + * @return A ProfileBreakdown to profile this query + */ + private ProfileBreakdown addDependencyNode(Query query, int token) { + + // Add a new slot in the dependency tree + tree.add(new ArrayList<>(5)); + + // Save our query for lookup later + queries.add(query); + + ProfileBreakdown queryTimings = new ProfileBreakdown(); + timings.add(token, queryTimings); + return queryTimings; + } + + /** + * Removes the last (e.g. most recent) value on the stack + */ + public void pollLast() { + stack.pollLast(); + } + + /** + * After the query has been run and profiled, we need to merge the flat timing map + * with the dependency graph to build a data structure that mirrors the original + * query tree + * + * @return a hierarchical representation of the profiled query tree + */ + public List<ProfileResult> getQueryTree() { + ArrayList<ProfileResult> results = new ArrayList<>(5); + for (Integer root : roots) { + results.add(doGetQueryTree(root)); + } + return results; + } + + /** + * Recursive helper to finalize a node in the dependency tree + * @param token The node we are currently finalizing + * @return A hierarchical representation of the tree inclusive of children at this level + */ + private ProfileResult doGetQueryTree(int token) { + Query query = queries.get(token); + ProfileBreakdown breakdown = timings.get(token); + Map<String, Long> timings = breakdown.toTimingMap(); + List<Integer> children = tree.get(token); + List<ProfileResult> childrenProfileResults = Collections.emptyList(); + + if (children != null) { + childrenProfileResults = new ArrayList<>(children.size()); + for (Integer child : children) { + ProfileResult childNode = doGetQueryTree(child); + childrenProfileResults.add(childNode); + } + } + + // TODO this would be better done bottom-up instead of top-down to avoid + // calculating the same times over and over...but worth the effort?
+ long nodeTime = getNodeTime(timings, childrenProfileResults); + String queryDescription = query.getClass().getSimpleName(); + String luceneName = query.toString(); + return new ProfileResult(queryDescription, luceneName, timings, childrenProfileResults, nodeTime); + } + + public long getRewriteTime() { + return rewriteTime; + } + + /** + * Internal helper to add a child to the current parent node + * + * @param childToken The child to add to the current parent + */ + private void updateParent(int childToken) { + Integer parent = stack.peekLast(); + ArrayList<Integer> parentNode = tree.get(parent); + parentNode.add(childToken); + tree.set(parent, parentNode); + } + + /** + * Internal helper to calculate the time of a node, inclusive of children + * + * @param timings A map of breakdown timing for the node + * @param children All children profile results at this node + * @return The total time at this node, inclusive of children + */ + private static long getNodeTime(Map<String, Long> timings, List<ProfileResult> children) { + long nodeTime = 0; + for (long time : timings.values()) { + nodeTime += time; + } + + // Then add up our children + for (ProfileResult child : children) { + nodeTime += getNodeTime(child.getTimeBreakdown(), child.getProfiledChildren()); + } + return nodeTime; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileBreakdown.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileBreakdown.java new file mode 100644 index 00000000000..55ad77b6937 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileBreakdown.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +/** + * A record of timings for the various operations that may happen during query execution. + * A node's time may be composed of several internal attributes (rewriting, weighting, + * scoring, etc). + */ +public final class ProfileBreakdown { + + /** Enumeration of all supported timing types. */ + public enum TimingType { + CREATE_WEIGHT, + BUILD_SCORER, + NEXT_DOC, + ADVANCE, + MATCH, + SCORE; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } + + /** + * The accumulated timings for this query node + */ + private final long[] timings; + + /** Scratch to store the current timing type. */ + private TimingType currentTimingType; + + /** + * The temporary scratch space for holding start-times + */ + private long scratch; + + /** Sole constructor.
*/ + public ProfileBreakdown() { + timings = new long[TimingType.values().length]; + } + + /** + * Begin timing a query for a specific Timing context + * @param timing The timing context being profiled + */ + public void startTime(TimingType timing) { + assert currentTimingType == null; + assert scratch == 0; + currentTimingType = timing; + scratch = System.nanoTime(); + } + + /** + * Halt the timing process and save the elapsed time. + * startTime() must be called for a particular context prior to calling + * stopAndRecordTime(), otherwise the elapsed time will be negative and + * nonsensical + * + * @return The elapsed time + */ + public long stopAndRecordTime() { + long time = Math.max(1, System.nanoTime() - scratch); + timings[currentTimingType.ordinal()] += time; + currentTimingType = null; + scratch = 0L; + return time; + } + + /** Convert this record to a map from {@link TimingType} to times. */ + public Map toTimingMap() { + Map map = new HashMap<>(); + for (TimingType timingType : TimingType.values()) { + map.put(timingType.toString(), timings[timingType.ordinal()]); + } + return Collections.unmodifiableMap(map); + } + + /** + * Add other's timings into this breakdown + * @param other Another Breakdown to merge with this one + */ + public void merge(ProfileBreakdown other) { + assert(timings.length == other.timings.length); + for (int i = 0; i < timings.length; ++i) { + timings[i] += other.timings[i]; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileCollector.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileCollector.java new file mode 100644 index 00000000000..7d7538c9117 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileCollector.java @@ -0,0 +1,94 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.FilterCollector; +import org.apache.lucene.search.FilterLeafCollector; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Scorer; + +import java.io.IOException; + +/** A collector that profiles how much time is spent calling it. */ +final class ProfileCollector extends FilterCollector { + + private long time; + + /** Sole constructor. */ + public ProfileCollector(Collector in) { + super(in); + } + + /** Return the wrapped collector. 
*/ + public Collector getDelegate() { + return in; + } + + @Override + public boolean needsScores() { + final long start = System.nanoTime(); + try { + return super.needsScores(); + } finally { + time += Math.max(1, System.nanoTime() - start); + } + } + + @Override + public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { + final long start = System.nanoTime(); + final LeafCollector inLeafCollector; + try { + inLeafCollector = super.getLeafCollector(context); + } finally { + time += Math.max(1, System.nanoTime() - start); + } + return new FilterLeafCollector(inLeafCollector) { + + @Override + public void collect(int doc) throws IOException { + final long start = System.nanoTime(); + try { + super.collect(doc); + } finally { + time += Math.max(1, System.nanoTime() - start); + } + } + + @Override + public void setScorer(Scorer scorer) throws IOException { + final long start = System.nanoTime(); + try { + super.setScorer(scorer); + } finally { + time += Math.max(1, System.nanoTime() - start); + } + } + }; + } + + /** Return the total time spent on this collector. */ + public long getTime() { + return time; + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java new file mode 100644 index 00000000000..4c8752fdaf2 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java @@ -0,0 +1,165 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +/** + * This class is the internal representation of a profiled Query, corresponding + * to a single node in the query tree. 
It is built after the query has finished executing + * and is merely a structured representation, rather than the entity that collects the timing + * profile (see InternalProfiler for that) + * + * Each InternalProfileResult has a List of InternalProfileResults, which will contain + * "children" queries if applicable + */ +final class ProfileResult implements Writeable, ToXContent { + + private static final ParseField QUERY_TYPE = new ParseField("query_type"); + private static final ParseField LUCENE_DESCRIPTION = new ParseField("lucene"); + private static final ParseField NODE_TIME = new ParseField("time"); + private static final ParseField CHILDREN = new ParseField("children"); + private static final ParseField BREAKDOWN = new ParseField("breakdown"); + + private final String queryType; + private final String luceneDescription; + private final Map timings; + private final long nodeTime; + private final List children; + + public ProfileResult(String queryType, String luceneDescription, Map timings, List children, long nodeTime) { + this.queryType = queryType; + this.luceneDescription = luceneDescription; + this.timings = timings; + this.children = children; + this.nodeTime = nodeTime; + } + + public ProfileResult(StreamInput in) throws IOException{ + this.queryType = in.readString(); + this.luceneDescription = in.readString(); + this.nodeTime = in.readLong(); + + int timingsSize = in.readVInt(); + this.timings = new HashMap<>(timingsSize); + for (int i = 0; i < timingsSize; ++i) { + timings.put(in.readString(), in.readLong()); + } + + int size = in.readVInt(); + this.children = new ArrayList<>(size); + + for (int i = 0; i < size; i++) { + children.add(new ProfileResult(in)); + } + } + + /** + * Retrieve the lucene description of this query (e.g. the "explain" text) + */ + public String getLuceneDescription() { + return luceneDescription; + } + + /** + * Retrieve the name of the query (e.g. "TermQuery") + */ + public String getQueryName() { + return queryType; + } + + /** + * Returns the timing breakdown for this particular query node + */ + public Map getTimeBreakdown() { + return Collections.unmodifiableMap(timings); + } + + /** + * Returns the total time (inclusive of children) for this query node. 
+ * + * @return elapsed time in nanoseconds + */ + public long getTime() { + return nodeTime; + } + + /** + * Returns a list of all profiled children queries + */ + public List getProfiledChildren() { + return Collections.unmodifiableList(children); + } + + @Override + public ProfileResult readFrom(StreamInput in) throws IOException { + return new ProfileResult(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(queryType); + out.writeString(luceneDescription); + out.writeLong(nodeTime); // not Vlong because can be negative + out.writeVInt(timings.size()); + for (Map.Entry entry : timings.entrySet()) { + out.writeString(entry.getKey()); + out.writeLong(entry.getValue()); + } + out.writeVInt(children.size()); + for (ProfileResult child : children) { + child.writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder = builder.startObject() + .field(QUERY_TYPE.getPreferredName(), queryType) + .field(LUCENE_DESCRIPTION.getPreferredName(), luceneDescription) + .field(NODE_TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double)(getTime() / 1000000.0))) + .field(BREAKDOWN.getPreferredName(), timings); + + if (!children.isEmpty()) { + builder = builder.startArray(CHILDREN.getPreferredName()); + for (ProfileResult child : children) { + builder = child.toXContent(builder, params); + } + builder = builder.endArray(); + } + + builder = builder.endObject(); + return builder; + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java new file mode 100644 index 00000000000..972d176ca10 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileScorer.java @@ -0,0 +1,170 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.search.Weight; + +import java.io.IOException; +import java.util.Collection; + +/** + * {@link Scorer} wrapper that will compute how much time is spent on moving + * the iterator, confirming matches and computing scores. 
+ */ +final class ProfileScorer extends Scorer { + + private final Scorer scorer; + private ProfileWeight profileWeight; + private final ProfileBreakdown profile; + + ProfileScorer(ProfileWeight w, Scorer scorer, ProfileBreakdown profile) throws IOException { + super(w); + this.scorer = scorer; + this.profileWeight = w; + this.profile = profile; + } + + @Override + public int docID() { + return scorer.docID(); + } + + @Override + public float score() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.SCORE); + try { + return scorer.score(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int freq() throws IOException { + return scorer.freq(); + } + + @Override + public Weight getWeight() { + return profileWeight; + } + + @Override + public Collection getChildren() { + return scorer.getChildren(); + } + + @Override + public DocIdSetIterator iterator() { + final DocIdSetIterator in = scorer.iterator(); + return new DocIdSetIterator() { + + @Override + public int advance(int target) throws IOException { + profile.startTime(ProfileBreakdown.TimingType.ADVANCE); + try { + return in.advance(target); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int nextDoc() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC); + try { + return in.nextDoc(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int docID() { + return in.docID(); + } + + @Override + public long cost() { + return in.cost(); + } + }; + } + + @Override + public TwoPhaseIterator twoPhaseIterator() { + final TwoPhaseIterator in = scorer.twoPhaseIterator(); + if (in == null) { + return null; + } + final DocIdSetIterator inApproximation = in.approximation(); + final DocIdSetIterator approximation = new DocIdSetIterator() { + + @Override + public int advance(int target) throws IOException { + profile.startTime(ProfileBreakdown.TimingType.ADVANCE); + try { + return inApproximation.advance(target); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int nextDoc() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC); + try { + return inApproximation.nextDoc(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public int docID() { + return inApproximation.docID(); + } + + @Override + public long cost() { + return inApproximation.cost(); + } + }; + return new TwoPhaseIterator(approximation) { + @Override + public boolean matches() throws IOException { + profile.startTime(ProfileBreakdown.TimingType.MATCH); + try { + return in.matches(); + } finally { + profile.stopAndRecordTime(); + } + } + + @Override + public float matchCost() { + return in.matchCost(); + } + }; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java new file mode 100644 index 00000000000..c472aa791cf --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * A container class to hold the profile results for a single shard in the request. + * Contains a list of query profiles, a collector tree and a total rewrite tree. + */ +public final class ProfileShardResult implements Writeable, ToXContent { + + private final List profileResults; + + private final CollectorResult profileCollector; + + private final long rewriteTime; + + public ProfileShardResult(List profileResults, long rewriteTime, + CollectorResult profileCollector) { + assert(profileCollector != null); + this.profileResults = profileResults; + this.profileCollector = profileCollector; + this.rewriteTime = rewriteTime; + } + + public ProfileShardResult(StreamInput in) throws IOException { + int profileSize = in.readVInt(); + profileResults = new ArrayList<>(profileSize); + for (int j = 0; j < profileSize; j++) { + profileResults.add(new ProfileResult(in)); + } + + profileCollector = new CollectorResult(in); + rewriteTime = in.readLong(); + } + + public List getQueryResults() { + return Collections.unmodifiableList(profileResults); + } + + public long getRewriteTime() { + return rewriteTime; + } + + public CollectorResult getCollectorResult() { + return profileCollector; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray("query"); + for (ProfileResult p : profileResults) { + p.toXContent(builder, params); + } + builder.endArray(); + builder.field("rewrite_time", rewriteTime); + builder.startArray("collector"); + profileCollector.toXContent(builder, params); + builder.endArray(); + return builder; + } + + @Override + public ProfileShardResult readFrom(StreamInput in) throws IOException { + return new ProfileShardResult(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(profileResults.size()); + for (ProfileResult p : profileResults) { + p.writeTo(out); + } + profileCollector.writeTo(out); + out.writeLong(rewriteTime); + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileWeight.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileWeight.java new file mode 100644 index 00000000000..1ce5cd721fe --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileWeight.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; + +import java.io.IOException; +import java.util.Set; + +/** + * Weight wrapper that will compute how much time it takes to build the + * {@link Scorer} and then return a {@link Scorer} that is wrapped in + * order to compute timings as well. + */ +public final class ProfileWeight extends Weight { + + private final Weight subQueryWeight; + private final ProfileBreakdown profile; + + public ProfileWeight(Query query, Weight subQueryWeight, ProfileBreakdown profile) throws IOException { + super(query); + this.subQueryWeight = subQueryWeight; + this.profile = profile; + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + profile.startTime(ProfileBreakdown.TimingType.BUILD_SCORER); + final Scorer subQueryScorer; + try { + subQueryScorer = subQueryWeight.scorer(context); + } finally { + profile.stopAndRecordTime(); + } + if (subQueryScorer == null) { + return null; + } + + return new ProfileScorer(this, subQueryScorer, profile); + } + + @Override + public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { + // We use the default bulk scorer instead of the specialized one. The reason + // is that Lucene's BulkScorers do everything at once: finding matches, + // scoring them and calling the collector, so they make it impossible to + // see where time is spent, which is the purpose of query profiling. + // The default bulk scorer will pull a scorer and iterate over matches, + // this might be a significantly different execution path for some queries + // like disjunctions, but in general this is what is done anyway + return super.bulkScorer(context); + } + + @Override + public Explanation explain(LeafReaderContext context, int doc) throws IOException { + return subQueryWeight.explain(context, doc); + } + + @Override + public float getValueForNormalization() throws IOException { + return subQueryWeight.getValueForNormalization(); + } + + @Override + public void normalize(float norm, float topLevelBoost) { + subQueryWeight.normalize(norm, topLevelBoost); + } + + @Override + public void extractTerms(Set set) { + subQueryWeight.extractTerms(set); + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/Profiler.java b/core/src/main/java/org/elasticsearch/search/profile/Profiler.java new file mode 100644 index 00000000000..011b1593d35 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/Profiler.java @@ -0,0 +1,132 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
diff --git a/core/src/main/java/org/elasticsearch/search/profile/Profiler.java b/core/src/main/java/org/elasticsearch/search/profile/Profiler.java
new file mode 100644
index 00000000000..011b1593d35
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/search/profile/Profiler.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.apache.lucene.search.Query;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * This class acts as a thread-local storage for profiling a query. It also
+ * builds a representation of the query tree which is constructed "online" as
+ * the weights are wrapped by ContextIndexSearcher. This allows us to know the
+ * relationship between nodes in the tree without explicitly walking the tree
+ * or pre-wrapping everything.
+ *
+ * A Profiler is associated with every search, not with every search request:
+ * a single request may execute two searches (query + global agg), and a
+ * Profiler represents just one of them.
+ */
+public final class Profiler {
+
+    private final InternalProfileTree queryTree = new InternalProfileTree();
+
+    /**
+     * The root Collector used in the search
+     */
+    private InternalProfileCollector collector;
+
+    public Profiler() {}
+
+    /** Set the collector that is associated with this profiler. */
+    public void setCollector(InternalProfileCollector collector) {
+        if (this.collector != null) {
+            throw new IllegalStateException("The collector can only be set once.");
+        }
+        this.collector = Objects.requireNonNull(collector);
+    }
+
+    /**
+     * Get the {@link ProfileBreakdown} for the given query, potentially creating it if it did not exist.
+     * This should only be used for queries that will be undergoing scoring. Do not use it to profile the
+     * rewriting phase.
+     */
+    public ProfileBreakdown getQueryBreakdown(Query query) {
+        return queryTree.getQueryBreakdown(query);
+    }
+
+    /**
+     * Begin timing the rewrite phase of a request. All rewrites are accumulated together into a
+     * single metric.
+     */
+    public void startRewriteTime() {
+        queryTree.startRewriteTime();
+    }
+
+    /**
+     * Stop recording the current rewrite and add its time to the total tally, returning the
+     * cumulative time so far.
+     *
+     * @return cumulative rewrite time
+     */
+    public long stopAndAddRewriteTime() {
+        return queryTree.stopAndAddRewriteTime();
+    }
+
+    /**
+     * Removes the last (i.e. most recent) query on the stack. This should only be called for scoring
+     * queries, not rewritten queries.
+     */
+    public void pollLastQuery() {
+        queryTree.pollLast();
+    }
+
+    /**
+     * @return a hierarchical representation of the profiled query tree
+     */
+    public List<ProfileResult> getQueryTree() {
+        return queryTree.getQueryTree();
+    }
+
+    /**
+     * @return total time taken to rewrite all queries in this profile
+     */
+    public long getRewriteTime() {
+        return queryTree.getRewriteTime();
+    }
+
+    /**
+     * Return the current root Collector for this search
+     */
+    public CollectorResult getCollector() {
+        return collector.getCollectorTree();
+    }
+
+    /**
+     * Helper method to convert Profilers into InternalProfileShardResults, which can be
+     * serialized to other nodes, emitted as JSON, etc.
+     *
+     * @param profilers a list of Profilers to convert into InternalProfileShardResults
+     * @return a list of corresponding InternalProfileShardResults
+     */
+    public static List<ProfileShardResult> buildShardResults(List<Profiler> profilers) {
+        List<ProfileShardResult> results = new ArrayList<>(profilers.size());
+        for (Profiler profiler : profilers) {
+            ProfileShardResult result = new ProfileShardResult(
+                    profiler.getQueryTree(), profiler.getRewriteTime(), profiler.getCollector());
+            results.add(result);
+        }
+        return results;
+    }
+
+}
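A minimal sketch of the rewrite bookkeeping described above; the real call site is the profiling searcher's rewrite path, but the shape is the same:

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Query;

final class RewriteTimingSketch {
    static Query rewriteAndTime(Profiler profiler, Query query, IndexReader reader) throws IOException {
        profiler.startRewriteTime();
        try {
            return query.rewrite(reader); // every rewrite round folds into one total
        } finally {
            profiler.stopAndAddRewriteTime();
        }
    }
}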
diff --git a/core/src/main/java/org/elasticsearch/search/profile/Profilers.java b/core/src/main/java/org/elasticsearch/search/profile/Profilers.java
new file mode 100644
index 00000000000..0fb7d9ac1c9
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/search/profile/Profilers.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.search.internal.ContextIndexSearcher;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/** Wrapper around several {@link Profiler}s that makes management easier. */
+public final class Profilers {
+
+    private final ContextIndexSearcher searcher;
+    private final List<Profiler> profilers;
+
+    /** Sole constructor. This {@link Profilers} instance will initially wrap one {@link Profiler}. */
+    public Profilers(ContextIndexSearcher searcher) {
+        this.searcher = searcher;
+        this.profilers = new ArrayList<>();
+        addProfiler();
+    }
+
+    /** Switch to a new profiler. */
+    public Profiler addProfiler() {
+        Profiler profiler = new Profiler();
+        searcher.setProfiler(profiler);
+        profilers.add(profiler);
+        return profiler;
+    }
+
+    /** Get the current profiler. */
+    public Profiler getCurrent() {
+        return profilers.get(profilers.size() - 1);
+    }
+
+    /** Return the list of all {@link Profiler}s created so far. */
+    public List<Profiler> getProfilers() {
+        return Collections.unmodifiableList(profilers);
+    }
+
+}
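A usage sketch for the wrapper above, assuming `searcher` is the request's ContextIndexSearcher:

import java.util.List;
import org.elasticsearch.search.internal.ContextIndexSearcher;

final class ProfilersUsageSketch {
    static List<ProfileShardResult> run(ContextIndexSearcher searcher) {
        Profilers profilers = new Profilers(searcher);   // starts with one Profiler
        Profiler queryProfiler = profilers.getCurrent(); // profiles the main query pass

        // A second pass in the same request (e.g. a global aggregation) gets a
        // fresh profiler, which is also re-registered on the searcher:
        Profiler globalAggProfiler = profilers.addProfiler();

        // Finally every pass is folded into one ProfileShardResult apiece:
        return Profiler.buildShardResults(profilers.getProfilers());
    }
}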
diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
index ce8836cd336..5352fb02895 100644
--- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
@@ -52,13 +52,19 @@ import org.elasticsearch.search.SearchService;
 import org.elasticsearch.search.aggregations.AggregationPhase;
 import org.elasticsearch.search.internal.ScrollContext;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.profile.CollectorResult;
+import org.elasticsearch.search.profile.InternalProfileCollector;
+import org.elasticsearch.search.profile.ProfileShardResult;
+import org.elasticsearch.search.profile.Profiler;
 import org.elasticsearch.search.rescore.RescorePhase;
 import org.elasticsearch.search.rescore.RescoreSearchContext;
 import org.elasticsearch.search.sort.SortParseElement;
 import org.elasticsearch.search.sort.TrackScoresParseElement;
 import org.elasticsearch.search.suggest.SuggestPhase;
 
+import java.util.AbstractList;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -124,6 +130,11 @@ public class QueryPhase implements SearchPhase {
         }
         suggestPhase.execute(searchContext);
         aggregationPhase.execute(searchContext);
+
+        if (searchContext.getProfilers() != null) {
+            List<ProfileShardResult> shardResults = Profiler.buildShardResults(searchContext.getProfilers().getProfilers());
+            searchContext.queryResult().profileResults(shardResults);
+        }
     }
 
     private static boolean returnsDocsInOrder(Query query, Sort sort) {
@@ -147,6 +158,7 @@ public class QueryPhase implements SearchPhase {
         QuerySearchResult queryResult = searchContext.queryResult();
         queryResult.searchTimedOut(false);
 
+        final boolean doProfile = searchContext.getProfilers() != null;
         final SearchType searchType = searchContext.searchType();
         boolean rescore = false;
         try {
@@ -162,9 +174,13 @@ public class QueryPhase implements SearchPhase {
             Callable<TopDocs> topDocsCallable;
 
             assert query == searcher.rewrite(query); // already rewritten
+
             if (searchContext.size() == 0) { // no matter what the value of from is
                 final TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
                 collector = totalHitCountCollector;
+                if (searchContext.getProfilers() != null) {
+                    collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_COUNT, Collections.emptyList());
+                }
                 topDocsCallable = new Callable<TopDocs>() {
                     @Override
                     public TopDocs call() throws Exception {
@@ -219,6 +235,9 @@ public class QueryPhase implements SearchPhase {
                     topDocsCollector = TopScoreDocCollector.create(numDocs, lastEmittedDoc);
                 }
                 collector = topDocsCollector;
+                if (doProfile) {
+                    collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TOP_HITS, Collections.emptyList());
+                }
                 topDocsCallable = new Callable<TopDocs>() {
                     @Override
                     public TopDocs call() throws Exception {
@@ -254,27 +273,57 @@ public class QueryPhase implements SearchPhase {
             final boolean terminateAfterSet = searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER;
 
             if (terminateAfterSet) {
+                final Collector child = collector;
                 // throws Lucene.EarlyTerminationException when given count is reached
                 collector = Lucene.wrapCountBasedEarlyTerminatingCollector(collector, searchContext.terminateAfter());
+                if (doProfile) {
+                    collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TERMINATE_AFTER_COUNT,
+                            Collections.singletonList((InternalProfileCollector) child));
+                }
             }
 
             if (searchContext.parsedPostFilter() != null) {
+                final Collector child = collector;
                 // this will only get applied to the actual search collector and not
                 // to any scoped collectors, also, it will only be applied to the main collector
                 // since that is where the filter should only work
                 final Weight filterWeight = searcher.createNormalizedWeight(searchContext.parsedPostFilter().query(), false);
                 collector = new FilteredCollector(collector, filterWeight);
+                if (doProfile) {
+                    collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_POST_FILTER,
+                            Collections.singletonList((InternalProfileCollector) child));
+                }
             }
 
             // plug in additional collectors, like aggregations
-            List<Collector> allCollectors = new ArrayList<>();
-            allCollectors.add(collector);
-            allCollectors.addAll(searchContext.queryCollectors().values());
-            collector = MultiCollector.wrap(allCollectors);
+            final List<Collector> subCollectors = new ArrayList<>();
+            subCollectors.add(collector);
+            subCollectors.addAll(searchContext.queryCollectors().values());
+            collector = MultiCollector.wrap(subCollectors);
+            if (doProfile && collector instanceof InternalProfileCollector == false) {
+                // When there is a single collector to wrap, MultiCollector returns it
+                // directly, so only wrap in the case that there are several sub collectors
+                final List<InternalProfileCollector> children = new AbstractList<InternalProfileCollector>() {
+                    @Override
+                    public InternalProfileCollector get(int index) {
+                        return (InternalProfileCollector) subCollectors.get(index);
+                    }
+                    @Override
+                    public int size() {
+                        return subCollectors.size();
+                    }
+                };
+                collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_MULTI, children);
+            }
 
             // apply the minimum score after multi collector so we filter aggs as well
             if (searchContext.minimumScore() != null) {
+                final Collector child = collector;
                 collector = new MinimumScoreCollector(collector, searchContext.minimumScore());
+                if (doProfile) {
+                    collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_MIN_SCORE,
+                            Collections.singletonList((InternalProfileCollector) child));
+                }
             }
 
             if (collector.getClass() == TotalHitCountCollector.class) {
@@ -319,13 +368,21 @@ public class QueryPhase implements SearchPhase {
             final boolean timeoutSet = searchContext.timeoutInMillis() != SearchService.NO_TIMEOUT.millis();
             if (timeoutSet && collector != null) { // collector might be null if no collection is actually needed
+                final Collector child = collector;
                 // TODO: change to use our own counter that uses the scheduler in ThreadPool
                 // throws TimeLimitingCollector.TimeExceededException when timeout has reached
                 collector = Lucene.wrapTimeLimitingCollector(collector, searchContext.timeEstimateCounter(), searchContext.timeoutInMillis());
+                if (doProfile) {
+                    collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TIMEOUT,
+                            Collections.singletonList((InternalProfileCollector) child));
+                }
             }
 
             try {
                 if (collector != null) {
+                    if (doProfile) {
+                        searchContext.getProfilers().getCurrent().setCollector((InternalProfileCollector) collector);
+                    }
                     searcher.search(query, collector);
                 }
             } catch (TimeLimitingCollector.TimeExceededException e) {
@@ -343,7 +400,13 @@ public class QueryPhase implements SearchPhase {
 
             queryResult.topDocs(topDocsCallable.call());
 
+            if (searchContext.getProfilers() != null) {
+                List<ProfileShardResult> shardResults = Profiler.buildShardResults(searchContext.getProfilers().getProfilers());
+                searchContext.queryResult().profileResults(shardResults);
+            }
+
             return rescore;
+
         } catch (Throwable e) {
             throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e);
         }
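The wrappers in the hunks above always record the collector they enclose as a child, so the final profile is a tree that mirrors the wrapping order. A minimal two-level sketch using the same constructors:

import java.util.Collections;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.elasticsearch.common.lucene.Lucene;

final class CollectorNestingSketch {
    static InternalProfileCollector twoLevels() {
        // Innermost: the top-hits collector, profiled with no children.
        InternalProfileCollector hits = new InternalProfileCollector(
                TopScoreDocCollector.create(10),
                CollectorResult.REASON_SEARCH_TOP_HITS,
                Collections.<InternalProfileCollector>emptyList());

        // One level out: early termination, recording `hits` as its child.
        Collector limited = Lucene.wrapCountBasedEarlyTerminatingCollector(hits, 100);
        return new InternalProfileCollector(
                limited,
                CollectorResult.REASON_SEARCH_TERMINATE_AFTER_COUNT,
                Collections.singletonList(hits));
    }
}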
diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java
index 7f8d12a9c90..9223eb5a82d 100644
--- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java
+++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java
@@ -20,6 +20,8 @@
 package org.elasticsearch.search.query;
 
 import org.apache.lucene.search.TopDocs;
+import org.elasticsearch.Version;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -29,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
 import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator;
+import org.elasticsearch.search.profile.ProfileShardResult;
 import org.elasticsearch.search.suggest.Suggest;
 
 import java.io.IOException;
@@ -53,6 +56,7 @@ public class QuerySearchResult extends QuerySearchResultProvider {
     private Suggest suggest;
     private boolean searchTimedOut;
     private Boolean terminatedEarly = null;
+    private List<ProfileShardResult> profileShardResults;
 
     public QuerySearchResult() {
 
@@ -120,6 +124,22 @@ public class QuerySearchResult extends QuerySearchResultProvider {
         this.aggregations = aggregations;
     }
 
+    /**
+     * Returns the profiled results for this search, or potentially null if the result was empty.
+     * @return the profiled results, or null
+     */
+    public @Nullable List<ProfileShardResult> profileResults() {
+        return profileShardResults;
+    }
+
+    /**
+     * Sets the finalized profiling results for this query.
+     * @param shardResults the finalized profile
+     */
+    public void profileResults(List<ProfileShardResult> shardResults) {
+        this.profileShardResults = shardResults;
+    }
+
     public List<PipelineAggregator> pipelineAggregators() {
         return pipelineAggregators;
     }
@@ -191,6 +211,15 @@ public class QuerySearchResult extends QuerySearchResultProvider {
         }
         searchTimedOut = in.readBoolean();
         terminatedEarly = in.readOptionalBoolean();
+
+        if (in.getVersion().onOrAfter(Version.V_2_2_0) && in.readBoolean()) {
+            int profileSize = in.readVInt();
+            profileShardResults = new ArrayList<>(profileSize);
+            for (int i = 0; i < profileSize; i++) {
+                ProfileShardResult result = new ProfileShardResult(in);
+                profileShardResults.add(result);
+            }
+        }
     }
 
     @Override
@@ -229,5 +258,17 @@ public class QuerySearchResult extends QuerySearchResultProvider {
         }
         out.writeBoolean(searchTimedOut);
         out.writeOptionalBoolean(terminatedEarly);
+
+        if (out.getVersion().onOrAfter(Version.V_2_2_0)) {
+            if (profileShardResults == null) {
+                out.writeBoolean(false);
+            } else {
+                out.writeBoolean(true);
+                out.writeVInt(profileShardResults.size());
+                for (ProfileShardResult shardResult : profileShardResults) {
+                    shardResult.writeTo(out);
+                }
+            }
+        }
     }
 }
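The `profileResults()` accessor added above is nullable by design, so consumers must check for null before merging; a hypothetical consumer sketch (the real cross-shard merge is not part of this change):

import java.util.List;

final class ProfileConsumerSketch {
    static long totalRewriteNanos(QuerySearchResult result) {
        List<ProfileShardResult> profiles = result.profileResults();
        if (profiles == null) {
            return 0; // profiling was not requested for this search
        }
        long total = 0;
        for (ProfileShardResult shard : profiles) {
            total += shard.getRewriteTime();
        }
        return total;
    }
}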
diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java
index 2a272f742e3..9fddf590ca4 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java
+++ 
b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -43,7 +43,6 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.internal.SearchContext; diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java index c465eaf6efb..e4fe2c08f75 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -130,7 +131,7 @@ public class ScriptSortParser implements SortParser { if (type == null) { throw new SearchParseException(context, "_script sorting requires setting the type of the script", parser.getTokenLocation()); } - final SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH); + final SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); if (STRING_SORT_TYPE.equals(type) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) { throw new SearchParseException(context, "type [string] doesn't support mode [" + sortMode + "]", parser.getTokenLocation()); diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java index 80a9daa53f0..a99158787d3 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java @@ -253,7 +253,7 @@ public class SortParseElement implements SearchParseElement { IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.fieldData().getForField(fieldType) .comparatorSource(missing, sortMode, nested); - sortFields.add(new SortField(fieldType.names().indexName(), fieldComparatorSource, reverse)); + sortFields.add(new SortField(fieldType.name(), fieldComparatorSource, reverse)); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java index db60d58953a..5ec92264389 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ElasticsearchException; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -36,7 +35,12 @@ import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestion; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import 
java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
 /**
  * Top level suggest result, containing the result for each suggestion.
@@ -80,19 +84,19 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContent {
     public Iterator<Suggestion<? extends Entry<? extends Option>>> iterator() {
         return suggestions.iterator();
     }
-    
+
     /**
      * The number of suggestions in this {@link Suggest} result
      */
     public int size() {
         return suggestions.size();
     }
-    
+
     public <T extends Suggestion<? extends Entry<? extends Option>>> T getSuggestion(String name) {
         if (suggestions.isEmpty() || name == null) {
             return null;
@@ -156,7 +160,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContent {
     public static Map<String, List<Suggest.Suggestion>> group(Map<String, List<Suggest.Suggestion>> groupedSuggestions, Suggest suggest) {
         for (Suggestion<? extends Entry<? extends Option>> suggestion : suggest) {
             List<Suggest.Suggestion> list = groupedSuggestions.get(suggestion.getName());
@@ -193,8 +197,8 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContent {
     public static class Suggestion<T extends Suggestion.Entry> implements Iterable<T>, Streamable, ToXContent {
-        
-        
+
+
         public static final int TYPE = 0;
         protected String name;
         protected int size;
@@ -211,7 +215,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContent {
         protected Comparator<Option> sortComparator() {
             return COMPARATOR;
         }
-        
+
         /**
          * Trims the number of options per suggest text term to the requested size.
         * For internal usage.
@@ -293,12 +297,12 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContent {
             protected void sort(Comparator<Option> comparator) {
                 CollectionUtil.timSort(options, comparator);
             }
@@ -481,7 +485,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContent {
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java
--- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java
     public static final Comparator<SuggestWord> LUCENE_FREQUENCY = new SuggestWordFrequencyComparator();
     public static final Comparator<SuggestWord> SCORE_COMPARATOR = SuggestWordQueue.DEFAULT_COMPARATOR;
-    
+
     private SuggestUtils() {
         // utils!!
     }
-    
+
     public static DirectSpellChecker getDirectSpellChecker(DirectSpellcheckerSettings suggestion) {
-        
+
         DirectSpellChecker directSpellChecker = new DirectSpellChecker();
         directSpellChecker.setAccuracy(suggestion.accuracy());
         Comparator<SuggestWord> comparator;
@@ -79,7 +88,7 @@ public final class SuggestUtils {
         directSpellChecker.setLowerCaseTerms(false);
         return directSpellChecker;
     }
-    
+
     public static BytesRef join(BytesRef separator, BytesRefBuilder result, BytesRef... 
toJoin) { result.clear(); for (int i = 0; i < toJoin.length - 1; i++) { @@ -89,40 +98,40 @@ public final class SuggestUtils { result.append(toJoin[toJoin.length-1]); return result.get(); } - + public static abstract class TokenConsumer { protected CharTermAttribute charTermAttr; protected PositionIncrementAttribute posIncAttr; protected OffsetAttribute offsetAttr; - + public void reset(TokenStream stream) { charTermAttr = stream.addAttribute(CharTermAttribute.class); posIncAttr = stream.addAttribute(PositionIncrementAttribute.class); offsetAttr = stream.addAttribute(OffsetAttribute.class); } - + protected BytesRef fillBytesRef(BytesRefBuilder spare) { spare.copyChars(charTermAttr); return spare.get(); } - + public abstract void nextToken() throws IOException; public void end() {} } - + public static int analyze(Analyzer analyzer, BytesRef toAnalyze, String field, TokenConsumer consumer, CharsRefBuilder spare) throws IOException { spare.copyUTF8Bytes(toAnalyze); return analyze(analyzer, spare.get(), field, consumer); } - + public static int analyze(Analyzer analyzer, CharsRef toAnalyze, String field, TokenConsumer consumer) throws IOException { try (TokenStream ts = analyzer.tokenStream( field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length))) { return analyze(ts, consumer); } } - + /** NOTE: this method closes the TokenStream, even on exception, which is awkward * because really the caller who called {@link Analyzer#tokenStream} should close it, * but when trying that there are recursion issues when we try to use the same @@ -147,7 +156,7 @@ public final class SuggestUtils { } return numTokens; } - + public static SuggestMode resolveSuggestMode(String suggestMode) { suggestMode = suggestMode.toLowerCase(Locale.US); if ("missing".equals(suggestMode)) { @@ -178,7 +187,7 @@ public final class SuggestUtils { return new LuceneLevenshteinDistance(); } else if ("levenstein".equals(distanceVal)) { return new LevensteinDistance(); - //TODO Jaro and Winkler are 2 people - so apply same naming logic as damerau_levenshtein + //TODO Jaro and Winkler are 2 people - so apply same naming logic as damerau_levenshtein } else if ("jarowinkler".equals(distanceVal)) { return new JaroWinklerDistance(); } else if ("ngram".equals(distanceVal)) { @@ -187,7 +196,7 @@ public final class SuggestUtils { throw new IllegalArgumentException("Illegal distance option " + distanceVal); } } - + public static class Fields { public static final ParseField STRING_DISTANCE = new ParseField("string_distance"); public static final ParseField SUGGEST_MODE = new ParseField("suggest_mode"); @@ -201,8 +210,8 @@ public final class SuggestUtils { public static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length", "min_word_len"); public static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq"); public static final ParseField SHARD_SIZE = new ParseField("shard_size"); - } - + } + public static boolean parseDirectSpellcheckerSettings(XContentParser parser, String fieldName, DirectSpellcheckerSettings suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException { if ("accuracy".equals(fieldName)) { @@ -233,10 +242,10 @@ public final class SuggestUtils { } return true; } - + public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName, SuggestionSearchContext.SuggestionContext suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException { - + if ("analyzer".equals(fieldName)) { String analyzerName = parser.text(); Analyzer 
analyzer = mapperService.analysisService().analyzer(analyzerName); @@ -254,10 +263,10 @@ public final class SuggestUtils { return false; } return true; - + } - - + + public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) { // Verify options and set defaults if (suggestion.getField() == null) { @@ -276,8 +285,8 @@ public final class SuggestUtils { suggestion.setShardSize(Math.max(suggestion.getSize(), 5)); } } - - + + public static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) { if (analyzer instanceof NamedAnalyzer) { analyzer = ((NamedAnalyzer)analyzer).analyzer(); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java index 9eba50f478a..af54e5dfd86 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java @@ -26,7 +26,11 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.phrase.PhraseSuggester; import org.elasticsearch.search.suggest.term.TermSuggester; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; /** * diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java index 08c0302f81e..e5e1b1b9199 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.completion; import com.carrotsearch.hppc.ObjectLongHashMap; - import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java index 928a1342ec1..a2e5f743c59 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java @@ -39,7 +39,10 @@ import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; import java.io.IOException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * Parses query options for {@link CompletionSuggester} @@ -137,13 +140,13 @@ public class CompletionSuggestParser implements SuggestContextParser { final ContextAndSuggest contextAndSuggest = new ContextAndSuggest(mapperService); TLP_PARSER.parse(parser, suggestion, contextAndSuggest); final XContentParser contextParser = contextAndSuggest.contextParser; - MappedFieldType mappedFieldType = mapperService.smartNameFieldType(suggestion.getField()); + MappedFieldType mappedFieldType = mapperService.fullName(suggestion.getField()); if (mappedFieldType == null) { throw new ElasticsearchException("Field [" + suggestion.getField() + "] is not a completion suggest field"); } else if (mappedFieldType instanceof 
CompletionFieldMapper.CompletionFieldType) { CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType; if (type.hasContextMappings() == false && contextParser != null) { - throw new IllegalArgumentException("suggester [" + type.names().fullName() + "] doesn't expect any context"); + throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context"); } Map> queryContexts = Collections.emptyMap(); if (type.hasContextMappings() && contextParser != null) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 106672ae7ae..527a35658c9 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -28,9 +28,9 @@ import org.apache.lucene.search.suggest.Lookup; import org.apache.lucene.search.suggest.document.CompletionQuery; import org.apache.lucene.search.suggest.document.TopSuggestDocs; import org.apache.lucene.search.suggest.document.TopSuggestDocsCollector; -import org.apache.lucene.util.*; +import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.MappedFieldType; @@ -40,7 +40,13 @@ import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.Suggester; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; public class CompletionSuggester extends Suggester { @@ -57,7 +63,7 @@ public class CompletionSuggester extends Suggester } CompletionSuggestion completionSuggestion = new CompletionSuggestion(name, suggestionContext.getSize()); spare.copyUTF8Bytes(suggestionContext.getText()); - CompletionSuggestion.Entry completionSuggestEntry = new CompletionSuggestion.Entry(new StringText(spare.toString()), 0, spare.length()); + CompletionSuggestion.Entry completionSuggestEntry = new CompletionSuggestion.Entry(new Text(spare.toString()), 0, spare.length()); completionSuggestion.addTerm(completionSuggestEntry); TopSuggestDocsCollector collector = new TopDocumentsCollector(suggestionContext.getSize()); suggest(searcher, suggestionContext.toQuery(), collector); @@ -78,7 +84,7 @@ public class CompletionSuggester extends Suggester final LeafReaderContext subReaderContext = leaves.get(readerIndex); final int subDocId = suggestDoc.doc - subReaderContext.docBase; for (String field : payloadFields) { - MappedFieldType payloadFieldType = suggestionContext.getMapperService().smartNameFieldType(field); + MappedFieldType payloadFieldType = suggestionContext.getMapperService().fullName(field); if (payloadFieldType != null) { final AtomicFieldData data = suggestionContext.getIndexFieldDataService().getForField(payloadFieldType).load(subReaderContext); final ScriptDocValues scriptValues = data.getScriptValues(); @@ -91,7 +97,7 @@ public class CompletionSuggester extends Suggester } if (numResult++ < suggestionContext.getSize()) { CompletionSuggestion.Entry.Option 
option = new CompletionSuggestion.Entry.Option( - new StringText(suggestDoc.key.toString()), suggestDoc.score, contexts, payload); + new Text(suggestDoc.key.toString()), suggestDoc.score, contexts, payload); completionSuggestEntry.addOption(option); } else { break; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java index 66c21c58162..ed3ebead7f5 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java @@ -21,14 +21,19 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.search.suggest.Lookup; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Suggestion response for {@link CompletionSuggester} results diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index fe80f70e260..100e701c03c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -30,15 +30,19 @@ import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; import java.io.IOException; -import java.util.*; - -import static org.elasticsearch.search.suggest.completion.context.CategoryContextMapping.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Defines a suggest command based on a prefix, typically to provide "auto-complete" functionality * for users as they type search terms. The implementation of the completion service uses FSTs that - * are created at index-time and so must be defined in the mapping with the type "completion" before - * indexing. + * are created at index-time and so must be defined in the mapping with the type "completion" before + * indexing. 
*/ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilder { private FuzzyOptionsBuilder fuzzyOptionsBuilder; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index 8ffd497eb3a..535151b476e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -28,7 +28,11 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; -import java.util.*; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java index 23c9ca730b9..dffbb1aa80d 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java @@ -29,7 +29,13 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; /** * A {@link ContextMapping} that uses a simple string as a criteria diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java index ee2655ebdda..c4931265776 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java @@ -21,7 +21,10 @@ package org.elasticsearch.search.suggest.completion.context; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index b15577d6fb2..42e5cc0a157 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -28,7 +28,9 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.List; +import java.util.Objects; +import java.util.Set; /** * A {@link ContextMapping} defines criteria that can be 
used to
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java
index 87b702c2ffb..9d4bed4f664 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java
@@ -32,9 +32,19 @@ import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
 
-import static org.elasticsearch.search.suggest.completion.context.ContextMapping.*;
+import static org.elasticsearch.search.suggest.completion.context.ContextMapping.FIELD_NAME;
+import static org.elasticsearch.search.suggest.completion.context.ContextMapping.FIELD_TYPE;
+import static org.elasticsearch.search.suggest.completion.context.ContextMapping.QueryContext;
+import static org.elasticsearch.search.suggest.completion.context.ContextMapping.Type;
 
 /**
  * ContextMappings indexes context-enabled suggestion fields
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java
index 57283c1bd05..f2f3d10215d 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java
@@ -36,7 +36,13 @@ import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
 
 /**
  * A {@link ContextMapping} that uses a geo location/area as a
@@ -295,14 +301,14 @@ public class GeoContextMapping extends ContextMapping {
 
         private int precision = DEFAULT_PRECISION;
         private String fieldName = null;
-        
+
         protected Builder(String name) {
             super(name);
         }
 
         /**
         * Set the precision used to make suggestions
-         * 
+         *
         * @param precision
         *            precision as distance with {@link DistanceUnit}. Default:
         *            meters
@@ -314,7 +320,7 @@ public class GeoContextMapping extends ContextMapping {
 
         /**
         * Set the precision used to make suggestions
-         * 
+         *
         * @param precision
         *            precision value
         * @param unit
@@ -327,23 +333,23 @@ public class GeoContextMapping extends ContextMapping {
 
         /**
         * Set the precision used to make suggestions
-         * 
+         *
         * @param meters
         *            precision as distance in meters
         * @return this
         */
         public Builder precision(double meters) {
             int level = GeoUtils.geoHashLevelsForPrecision(meters);
-            // Ceiling precision: we might return more results 
+            // Ceiling precision: we might return more results
             if (GeoUtils.geoHashCellSize(level) < meters) {
-                level = Math.max(1, level - 1); 
+                level = Math.max(1, level - 1);
             }
             return precision(level);
         }
 
         /**
         * Set the precision used to make suggestions
-         * 
+         *
         * @param level
         *            maximum length of geohashes
         * @return this
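The Builder#precision(double) ceiling logic above, restated as a standalone sketch (GeoUtils calls exactly as in the diff):

import org.elasticsearch.common.geo.GeoUtils;

final class PrecisionCeilingSketch {
    // Pick the finest geohash level for the requested distance, then back off
    // one level if those cells are smaller than what was asked for, trading
    // precision for recall ("we might return more results").
    static int levelFor(double meters) {
        int level = GeoUtils.geoHashLevelsForPrecision(meters);
        if (GeoUtils.geoHashCellSize(level) < meters) {
            level = Math.max(1, level - 1);
        }
        return level;
    }
}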
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java
index 75cab1e8e89..da9191bf2d5 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java
@@ -32,7 +32,10 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 
-import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.*;
+import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_BOOST;
+import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_NEIGHBOURS;
+import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_PRECISION;
+import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_VALUE;
 
 /**
  * Defines the query context for {@link GeoContextMapping}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java
index 56c6181d612..f98822296b0 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java
@@ -18,12 +18,12 @@
  */
 package org.elasticsearch.search.suggest.phrase;
 
-import java.io.IOException;
-
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate;
 import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet;
 
+import java.io.IOException;
+
 //TODO public for tests
 public abstract class CandidateGenerator {
 
@@ -35,7 +35,7 @@ public abstract class CandidateGenerator {
         CandidateSet set = new CandidateSet(Candidate.EMPTY, createCandidate(term, true));
         return drawCandidates(set);
     }
-    
+
     public Candidate createCandidate(BytesRef term, boolean userInput) throws IOException {
         return createCandidate(term, frequency(term), 1.0, userInput);
     }
@@ -47,4 +47,4 @@ public abstract class CandidateGenerator {
 
     public abstract CandidateSet drawCandidates(CandidateSet set) throws IOException;
 
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java
index b027497479c..b3dad43ad34 100644
--- 
a/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java @@ -17,13 +17,13 @@ * under the License. */ package org.elasticsearch.search.suggest.phrase; -import java.io.IOException; -import java.util.Arrays; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet; +import java.io.IOException; + final class CandidateScorer { private final WordScorer scorer; private final int maxNumCorrections; @@ -34,8 +34,8 @@ final class CandidateScorer { this.maxNumCorrections = maxNumCorrections; this.gramSize = gramSize; } - - + + public Correction[] findBestCandiates(CandidateSet[] sets, float errorFraction, double cutoffScore) throws IOException { if (sets.length == 0) { return Correction.EMPTY; @@ -107,7 +107,7 @@ final class CandidateScorer { } } } - + public double score(Candidate[] path, CandidateSet[] candidates) throws IOException { double score = 0.0d; for (int i = 0; i < candidates.length; i++) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index 8af181f0e63..5b937500d6b 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -19,7 +19,11 @@ package org.elasticsearch.search.suggest.phrase; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.index.*; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.MultiFields; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.search.spell.SuggestMode; import org.apache.lucene.search.spell.SuggestWord; @@ -29,7 +33,12 @@ import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.search.suggest.SuggestUtils; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; //TODO public for tests public final class DirectCandidateGenerator extends CandidateGenerator { @@ -49,7 +58,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { private final CharsRefBuilder spare = new CharsRefBuilder(); private final BytesRefBuilder byteSpare = new BytesRefBuilder(); private final int numCandidates; - + public DirectCandidateGenerator(DirectSpellChecker spellchecker, String field, SuggestMode suggestMode, IndexReader reader, double nonErrorLikelihood, int numCandidates) throws IOException { this(spellchecker, field, suggestMode, reader, nonErrorLikelihood, numCandidates, null, null, MultiFields.getTerms(reader, field)); } @@ -95,15 +104,15 @@ public final class DirectCandidateGenerator extends CandidateGenerator { public long internalFrequency(BytesRef term) throws IOException { if (termsEnum.seekExact(term)) { - return useTotalTermFrequency ? termsEnum.totalTermFreq() : termsEnum.docFreq(); + return useTotalTermFrequency ? 
termsEnum.totalTermFreq() : termsEnum.docFreq(); } return 0; } - + public String getField() { return field; } - + /* (non-Javadoc) * @see org.elasticsearch.search.suggest.phrase.CandidateGenerator#drawCandidates(org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet, int) */ @@ -123,14 +132,14 @@ public final class DirectCandidateGenerator extends CandidateGenerator { set.addCandidates(candidates); return set; } - + protected BytesRef preFilter(final BytesRef term, final CharsRefBuilder spare, final BytesRefBuilder byteSpare) throws IOException { if (preFilter == null) { return term; } final BytesRefBuilder result = byteSpare; SuggestUtils.analyze(preFilter, term, field, new SuggestUtils.TokenConsumer() { - + @Override public void nextToken() throws IOException { this.fillBytesRef(result); @@ -138,7 +147,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { }, spare); return result.get(); } - + protected void postFilter(final Candidate candidate, final CharsRefBuilder spare, BytesRefBuilder byteSpare, final List candidates) throws IOException { if (postFilter == null) { candidates.add(candidate); @@ -148,11 +157,11 @@ public final class DirectCandidateGenerator extends CandidateGenerator { @Override public void nextToken() throws IOException { this.fillBytesRef(result); - + if (posIncAttr.getPositionIncrement() > 0 && result.get().bytesEquals(candidate.term)) { BytesRef term = result.toBytesRef(); // We should not use frequency(term) here because it will analyze the term again - // If preFilter and postFilter are the same analyzer it would fail. + // If preFilter and postFilter are the same analyzer it would fail. long freq = internalFrequency(term); candidates.add(new Candidate(result.toBytesRef(), freq, candidate.stringDistance, score(candidate.frequency, candidate.stringDistance, dictSize), false)); } else { @@ -162,19 +171,19 @@ public final class DirectCandidateGenerator extends CandidateGenerator { }, spare); } } - + private double score(long frequency, double errorScore, long dictionarySize) { return errorScore * (((double)frequency + 1) / ((double)dictionarySize +1)); } - + protected long thresholdFrequency(long termFrequency, long dictionarySize) { if (termFrequency > 0) { return (long) Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1)); } return 0; - + } - + public static class CandidateSet { public Candidate[] candidates; public final Candidate originalTerm; @@ -183,7 +192,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { this.candidates = candidates; this.originalTerm = originalTerm; } - + public void addCandidates(List candidates) { // Merge new candidates into existing ones, // deduping: @@ -223,7 +232,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { @Override public String toString() { - return "Candidate [term=" + term.utf8ToString() + ", stringDistance=" + stringDistance + ", score=" + score + ", frequency=" + frequency + + return "Candidate [term=" + term.utf8ToString() + ", stringDistance=" + stringDistance + ", score=" + score + ", frequency=" + frequency + (userInput ? 
", userInput" : "" ) + "]"; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java index 0d6f8932983..4936973b237 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java @@ -18,17 +18,17 @@ */ package org.elasticsearch.search.suggest.phrase; -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; - import org.apache.lucene.util.BytesRef; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Comparator; //TODO public for tests public final class MultiCandidateGeneratorWrapper extends CandidateGenerator { - + private final CandidateGenerator[] candidateGenerator; private int numCandidates ; @@ -53,7 +53,7 @@ public final class MultiCandidateGeneratorWrapper extends CandidateGenerator { } return reduce(set, numCandidates); } - + private final CandidateSet reduce(CandidateSet set, int numCandidates) { if (set.candidates.length > numCandidates) { Candidate[] candidates = set.candidates; @@ -61,14 +61,14 @@ public final class MultiCandidateGeneratorWrapper extends CandidateGenerator { @Override public int compare(Candidate left, Candidate right) { - return Double.compare(right.score, left.score); + return Double.compare(right.score, left.score); } }); Candidate[] newSet = new Candidate[numCandidates]; System.arraycopy(candidates, 0, newSet, 0, numCandidates); set.candidates = newSet; } - + return set; } @Override diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java index 4bbdaf9c49e..0b904a95720 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java @@ -39,6 +39,7 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import java.io.IOException; +import java.util.Collections; public final class PhraseSuggestParser implements SuggestContextParser { @@ -143,7 +144,7 @@ public final class PhraseSuggestParser implements SuggestContextParser { } Template template = Template.parse(parser, parseFieldMatcher); CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH, - headersContext); + headersContext, Collections.emptyMap()); suggestion.setCollateQueryScript(compiledScript); } else if ("params".equals(fieldName)) { suggestion.setCollateScriptParams(parser.map()); @@ -170,7 +171,7 @@ public final class PhraseSuggestParser implements SuggestContextParser { throw new IllegalArgumentException("The required field option is missing"); } - MappedFieldType fieldType = mapperService.smartNameFieldType(suggestion.getField()); + MappedFieldType fieldType = mapperService.fullName(suggestion.getField()); if (fieldType == null) { throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]"); } else if (suggestion.getAnalyzer() == null) { @@ -328,7 +329,7 
@@ public final class PhraseSuggestParser implements SuggestContextParser { if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) { if ("field".equals(fieldName)) { generator.setField(parser.text()); - if (mapperService.smartNameFieldType(generator.field()) == null) { + if (mapperService.fullName(generator.field()) == null) { throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]"); } } else if ("size".equals(fieldName)) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index fccf9ebc30e..c7fa6fae302 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -30,7 +30,6 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.ParsedQuery; @@ -127,11 +126,11 @@ public final class PhraseSuggester extends Suggester { if (!collateMatch && !collatePrune) { continue; } - Text phrase = new StringText(spare.toString()); + Text phrase = new Text(spare.toString()); Text highlighted = null; if (suggestion.getPreTag() != null) { spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, suggestion.getPreTag(), suggestion.getPostTag())); - highlighted = new StringText(spare.toString()); + highlighted = new Text(spare.toString()); } if (collatePrune) { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); @@ -147,7 +146,7 @@ public final class PhraseSuggester extends Suggester { private PhraseSuggestion.Entry buildResultEntry(PhraseSuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) { spare.copyUTF8Bytes(suggestion.getText()); - return new PhraseSuggestion.Entry(new StringText(spare.toString()), 0, spare.length(), cutoffScore); + return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore); } ScriptService scriptService() { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java index 8d2a6fdd123..04d06abbfd0 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java @@ -18,11 +18,6 @@ */ package org.elasticsearch.search.suggest.phrase; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.util.BytesRef; import org.elasticsearch.script.CompiledScript; @@ -30,6 +25,11 @@ import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + class PhraseSuggestionContext extends SuggestionContext { private final BytesRef SEPARATOR = new 
BytesRef(" "); private float maxErrors = 0.5f; @@ -81,27 +81,27 @@ class PhraseSuggestionContext extends SuggestionContext { public void addGenerator(DirectCandidateGenerator generator) { this.generators.add(generator); } - + public List generators() { return this.generators ; } - + public void setGramSize(int gramSize) { this.gramSize = gramSize; } - + public int gramSize() { return gramSize; } - + public float confidence() { return confidence; } - + public void setConfidence(float confidence) { this.confidence = confidence; } - + public void setModel(WordScorer.WordScorerFactory scorer) { this.scorer = scorer; } @@ -134,7 +134,7 @@ class PhraseSuggestionContext extends SuggestionContext { } this.size = size; } - + public Analyzer preFilter() { return preFilter; } @@ -150,22 +150,22 @@ class PhraseSuggestionContext extends SuggestionContext { public void postFilter(Analyzer postFilter) { this.postFilter = postFilter; } - - + + } public void setRequireUnigram(boolean requireUnigram) { this.requireUnigram = requireUnigram; } - + public boolean getRequireUnigram() { return requireUnigram; } - + public void setTokenLimit(int tokenLimit) { this.tokenLimit = tokenLimit; } - + public int getTokenLimit() { return tokenLimit; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java index 4c1b176c990..34cd3ad4d56 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java @@ -27,8 +27,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.text.BytesText; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; @@ -54,10 +52,10 @@ public final class TermSuggester extends Suggester { SuggestWord[] suggestedWords = directSpellChecker.suggestSimilar( token.term, suggestion.getShardSize(), indexReader, suggestion.getDirectSpellCheckerSettings().suggestMode() ); - Text key = new BytesText(new BytesArray(token.term.bytes())); + Text key = new Text(new BytesArray(token.term.bytes())); TermSuggestion.Entry resultEntry = new TermSuggestion.Entry(key, token.startOffset, token.endOffset - token.startOffset); for (SuggestWord suggestWord : suggestedWords) { - Text word = new StringText(suggestWord.string); + Text word = new Text(suggestWord.string); resultEntry.addOption(new TermSuggestion.Entry.Option(word, suggestWord.freq, suggestWord.score)); } response.addTerm(resultEntry); diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index cd710d52cdc..e167a03f8fc 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -25,16 +25,30 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import 
org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.RestoreInProgress.ShardRestoreStatus; import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.*; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; +import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; +import org.elasticsearch.cluster.metadata.RepositoriesMetaData; +import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RestoreSource; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; -import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -44,6 +58,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -53,16 +68,35 @@ import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CopyOnWriteArrayList; import static java.util.Collections.unmodifiableSet; -import static org.elasticsearch.cluster.metadata.IndexMetaData.*; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_INDEX_UUID; +import static 
org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_UPGRADED; import static org.elasticsearch.common.util.set.Sets.newHashSet; /** @@ -118,18 +152,19 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis private final MetaDataCreateIndexService createIndexService; - private final DynamicSettings dynamicSettings; + private final ClusterSettings dynamicSettings; private final MetaDataIndexUpgradeService metaDataIndexUpgradeService; private final CopyOnWriteArrayList> listeners = new CopyOnWriteArrayList<>(); private final BlockingQueue updatedSnapshotStateQueue = ConcurrentCollections.newBlockingQueue(); + private final ClusterSettings clusterSettings; @Inject public RestoreService(Settings settings, ClusterService clusterService, RepositoriesService repositoriesService, TransportService transportService, - AllocationService allocationService, MetaDataCreateIndexService createIndexService, @ClusterDynamicSettings DynamicSettings dynamicSettings, - MetaDataIndexUpgradeService metaDataIndexUpgradeService) { + AllocationService allocationService, MetaDataCreateIndexService createIndexService, ClusterSettings dynamicSettings, + MetaDataIndexUpgradeService metaDataIndexUpgradeService, ClusterSettings clusterSettings) { super(settings); this.clusterService = clusterService; this.repositoriesService = repositoriesService; @@ -140,6 +175,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis this.metaDataIndexUpgradeService = metaDataIndexUpgradeService; transportService.registerRequestHandler(UPDATE_RESTORE_ACTION_NAME, UpdateIndexShardRestoreStatusRequest::new, ThreadPool.Names.SAME, new UpdateRestoreStateRequestHandler()); clusterService.add(this); + this.clusterSettings = clusterSettings; } /** @@ -389,24 +425,9 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis private void restoreGlobalStateIfRequested(MetaData.Builder mdBuilder) { if (request.includeGlobalState()) { if (metaData.persistentSettings() != null) { - boolean changed = false; - Settings.Builder persistentSettings = Settings.settingsBuilder().put(); - for (Map.Entry entry : metaData.persistentSettings().getAsMap().entrySet()) { - if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) { - String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state()); - if (error == null) { - persistentSettings.put(entry.getKey(), entry.getValue()); - changed = true; - } else { - logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error); - } - } else { - logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey()); - } - } - if (changed) { - mdBuilder.persistentSettings(persistentSettings.build()); - } + Settings settings = metaData.persistentSettings(); + clusterSettings.dryRun(settings); + mdBuilder.persistentSettings(settings); } if (metaData.templates() != null) { // TODO: Should all existing templates be deleted first? 
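
The RestoreService hunk above replaces the old per-setting validation loop with a single ClusterSettings.dryRun(...) call over the restored persistent settings. A minimal caller-side sketch of that pattern, assuming dryRun validates against the registered settings and throws IllegalArgumentException on an invalid value, and that BUILT_IN_CLUSTER_SETTINGS is this revision's default registry (both assumptions, not confirmed by the patch):

    import org.elasticsearch.common.settings.ClusterSettings;
    import org.elasticsearch.common.settings.Settings;

    public class RestoreSettingsDryRun {
        public static void main(String[] args) {
            // assumption: BUILT_IN_CLUSTER_SETTINGS is the default registry of known settings
            ClusterSettings clusterSettings =
                    new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
            Settings restored = Settings.settingsBuilder()
                    .put("cluster.routing.allocation.enable", "none")
                    .build();
            try {
                clusterSettings.dryRun(restored); // validate without applying
                System.out.println("restored persistent settings are valid");
            } catch (IllegalArgumentException e) {
                System.out.println("rejecting restored settings: " + e.getMessage());
            }
        }
    }
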
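Similarly, the suggester hunks earlier in this patch fold StringText and BytesText into the single Text class; both replacement constructors are visible in the diff. A self-contained sketch of the unified API, assuming Text retains the string() accessor the old classes shared:

    import java.nio.charset.StandardCharsets;

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.common.text.Text;

    public class TextDemo {
        public static void main(String[] args) {
            // Text now covers both cases StringText and BytesText used to handle
            // separately: string-backed and bytes-backed values
            Text fromString = new Text("suggestion");
            Text fromBytes = new Text(new BytesArray("suggestion".getBytes(StandardCharsets.UTF_8)));
            System.out.println(fromString.string().equals(fromBytes.string()));
        }
    }
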
diff --git a/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java b/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java index 1206ef53501..42eb255e8dd 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java +++ b/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java @@ -22,7 +22,11 @@ package org.elasticsearch.snapshots; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java index 3c8cdc4d067..58faecda4a4 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java @@ -19,7 +19,6 @@ package org.elasticsearch.snapshots; -import org.elasticsearch.ElasticsearchWrapperException; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 3033a0ff801..7b9506190f2 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -18,11 +18,6 @@ */ package org.elasticsearch.snapshots; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - import org.elasticsearch.Version; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.io.stream.StreamInput; @@ -35,6 +30,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.rest.RestStatus; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + /** * Information about snapshot */ @@ -327,4 +327,4 @@ public class SnapshotInfo implements ToXContent, Streamable { return in.readOptionalStreamable(SnapshotInfo::new); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 91cf2af4ec5..340a7f6ce83 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -20,7 +20,6 @@ package org.elasticsearch.snapshots; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.index.IndexCommit; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.ClusterChangedEvent; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index bf3af7394dd..b1d16a04ead 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ 
b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -21,7 +21,6 @@ package org.elasticsearch.snapshots; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.ShardSearchFailure; diff --git a/core/src/main/java/org/elasticsearch/tasks/ChildTask.java b/core/src/main/java/org/elasticsearch/tasks/ChildTask.java new file mode 100644 index 00000000000..14d49baf398 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/tasks/ChildTask.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.tasks; + +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.inject.Provider; + +/** + * Child task + */ +public class ChildTask extends Task { + + private final String parentNode; + + private final long parentId; + + public ChildTask(long id, String type, String action, Provider description, String parentNode, long parentId) { + super(id, type, action, description); + this.parentNode = parentNode; + this.parentId = parentId; + } + + /** + * Returns parent node of the task or null if task doesn't have any parent tasks + */ + public String getParentNode() { + return parentNode; + } + + /** + * Returns id of the parent task or -1L if task doesn't have any parent tasks + */ + public long getParentId() { + return parentId; + } + + public TaskInfo taskInfo(DiscoveryNode node, boolean detailed) { + return new TaskInfo(node, getId(), getType(), getAction(), detailed ? getDescription() : null, parentNode, parentId); + } +} diff --git a/core/src/main/java/org/elasticsearch/tasks/Task.java b/core/src/main/java/org/elasticsearch/tasks/Task.java new file mode 100644 index 00000000000..9e925b09d1a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/tasks/Task.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +package org.elasticsearch.tasks; + +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.inject.Provider; + +/** + * Current task information + */ +public class Task { + + private final long id; + + private final String type; + + private final String action; + + private final Provider description; + + public Task(long id, String type, String action, Provider description) { + this.id = id; + this.type = type; + this.action = action; + this.description = description; + } + + public TaskInfo taskInfo(DiscoveryNode node, boolean detailed) { + return new TaskInfo(node, id, type, action, detailed ? getDescription() : null); + } + + /** + * Returns task id + */ + public long getId() { + return id; + } + + /** + * Returns task channel type (netty, transport, direct) + */ + public String getType() { + return type; + } + + /** + * Returns task action + */ + public String getAction() { + return action; + } + + /** + * Generates task description + */ + public String getDescription() { + return description.get(); + } + +} diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java new file mode 100644 index 00000000000..68e2dcbe9a5 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.tasks; + +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; +import org.elasticsearch.transport.TransportRequest; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Task Manager service for keeping track of currently running tasks on the nodes + */ +public class TaskManager extends AbstractComponent { + + private final ConcurrentMapLong tasks = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency(); + + private final AtomicLong taskIdGenerator = new AtomicLong(); + + public TaskManager(Settings settings) { + super(settings); + } + + /** + * Registers a task without parent task + */ + public Task register(String type, String action, TransportRequest request) { + Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action); + if (task != null) { + if (logger.isTraceEnabled()) { + logger.trace("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription()); + } + Task previousTask = tasks.put(task.getId(), task); + assert previousTask == null; + } + return task; + } + + /** + * Unregister the task + */ + public void unregister(Task task) { + logger.trace("unregister task for id: {}", task.getId()); + tasks.remove(task.getId()); + } + + /** + * Returns the list of currently running tasks on the node + */ + public Map getTasks() { + return Collections.unmodifiableMap(new HashMap<>(tasks)); + } + +} diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index b0d81279b03..5d0c814a285 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -20,13 +20,13 @@ package org.elasticsearch.threadpool; import org.apache.lucene.util.Counter; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.SizeValue; @@ -38,14 +38,26 @@ import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.node.settings.NodeSettingsService; import java.io.IOException; -import java.util.*; -import java.util.concurrent.*; -import java.util.function.Function; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executor; +import 
java.util.concurrent.ExecutorService; +import java.util.concurrent.RejectedExecutionHandler; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -172,7 +184,7 @@ public class ThreadPool extends AbstractComponent { } } - public static final String THREADPOOL_GROUP = "threadpool."; + public static final Setting<Settings> THREADPOOL_GROUP_SETTING = Setting.groupSetting("threadpool.", true, Setting.Scope.CLUSTER); private volatile Map executors; @@ -184,7 +196,7 @@ public class ThreadPool extends AbstractComponent { private final EstimatedTimeThread estimatedTimeThread; - private boolean settingsListenerIsSet = false; + private final AtomicBoolean settingsListenerIsSet = new AtomicBoolean(false); static final Executor DIRECT_EXECUTOR = command -> command.run(); @@ -197,7 +209,8 @@ public class ThreadPool extends AbstractComponent { assert settings.get("name") != null : "ThreadPool's settings should contain a name"; - Map<String, Settings> groupSettings = getThreadPoolSettingsGroup(settings); + Map<String, Settings> groupSettings = THREADPOOL_GROUP_SETTING.get(settings).getAsGroups(); + validate(groupSettings); int availableProcessors = EsExecutors.boundedNumberOfProcessors(settings); int halfProcMaxAt5 = Math.min(((availableProcessors + 1) / 2), 5); @@ -252,18 +265,12 @@ public class ThreadPool extends AbstractComponent { this.estimatedTimeThread.start(); } - private Map<String, Settings> getThreadPoolSettingsGroup(Settings settings) { - Map<String, Settings> groupSettings = settings.getGroups(THREADPOOL_GROUP); - validate(groupSettings); - return groupSettings; - } - - public void setNodeSettingsService(NodeSettingsService nodeSettingsService) { - if(settingsListenerIsSet) { + public void setClusterSettings(ClusterSettings clusterSettings) { + if(settingsListenerIsSet.compareAndSet(false, true)) { + clusterSettings.addSettingsUpdateConsumer(THREADPOOL_GROUP_SETTING, this::updateSettings, (s) -> validate(s.getAsGroups())); + } else { throw new IllegalStateException("the node settings listener was set more than once"); } - nodeSettingsService.addListener(new ApplySettings()); - settingsListenerIsSet = true; } public long estimatedTimeInMillis() { @@ -526,8 +533,8 @@ public class ThreadPool extends AbstractComponent { throw new IllegalArgumentException("No type found [" + type + "], for [" + name + "]"); } - public void updateSettings(Settings settings) { - Map<String, Settings> groupSettings = getThreadPoolSettingsGroup(settings); + private void updateSettings(Settings settings) { + Map<String, Settings> groupSettings = settings.getAsGroups(); if (groupSettings.isEmpty()) { return; } @@ -583,7 +590,7 @@ public class ThreadPool extends AbstractComponent { ThreadPoolType correctThreadPoolType = THREAD_POOL_TYPES.get(key); // TODO: the type equality check can be removed after #3760/#6732 are addressed if (type != null && !correctThreadPoolType.getType().equals(type)) { - throw new IllegalArgumentException("setting " + THREADPOOL_GROUP + key + ".type to " + type + " is not permitted; must be " + correctThreadPoolType.getType()); + throw new IllegalArgumentException("setting " + THREADPOOL_GROUP_SETTING.getKey() + key + ".type to " + type + " is not permitted; must be " +
correctThreadPoolType.getType()); } } } @@ -866,13 +873,6 @@ public class ThreadPool extends AbstractComponent { } - class ApplySettings implements NodeSettingsService.Listener { - @Override - public void onRefreshSettings(Settings settings) { - updateSettings(settings); - } - } - /** * Returns true if the given service was terminated successfully. If the termination timed out, * the service is null this method will return false. @@ -911,38 +911,4 @@ public class ThreadPool extends AbstractComponent { } return false; } - - public static ThreadPoolTypeSettingsValidator THREAD_POOL_TYPE_SETTINGS_VALIDATOR = new ThreadPoolTypeSettingsValidator(); - private static class ThreadPoolTypeSettingsValidator implements Validator { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - // TODO: the type equality validation can be removed after #3760/#6732 are addressed - Matcher matcher = Pattern.compile("threadpool\\.(.*)\\.type").matcher(setting); - if (!matcher.matches()) { - return null; - } else { - String threadPool = matcher.group(1); - ThreadPool.ThreadPoolType defaultThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPool); - ThreadPool.ThreadPoolType threadPoolType; - try { - threadPoolType = ThreadPool.ThreadPoolType.fromType(value); - } catch (IllegalArgumentException e) { - return e.getMessage(); - } - if (defaultThreadPoolType.equals(threadPoolType)) { - return null; - } else { - return String.format( - Locale.ROOT, - "thread pool type for [%s] can only be updated to [%s] but was [%s]", - threadPool, - defaultThreadPoolType.getType(), - threadPoolType.getType() - ); - } - } - - } - } - } diff --git a/core/src/main/java/org/elasticsearch/transport/DelegatingTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/DelegatingTransportChannel.java new file mode 100644 index 00000000000..f6b178dba8d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/transport/DelegatingTransportChannel.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport; + +import java.io.IOException; + +/** + * Wrapper around transport channel that delegates all requests to the + * underlying channel + */ +public class DelegatingTransportChannel implements TransportChannel { + + private final TransportChannel channel; + + protected DelegatingTransportChannel(TransportChannel channel) { + this.channel = channel; + } + + @Override + public String action() { + return channel.action(); + } + + @Override + public String getProfileName() { + return channel.getProfileName(); + } + + @Override + public long getRequestId() { + return channel.getRequestId(); + } + + @Override + public String getChannelType() { + return channel.getChannelType(); + } + + @Override + public void sendResponse(TransportResponse response) throws IOException { + channel.sendResponse(response); + } + + @Override + public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException { + channel.sendResponse(response, options); + } + + @Override + public void sendResponse(Throwable error) throws IOException { + channel.sendResponse(error); + } + + public TransportChannel getChannel() { + return channel; + } +} diff --git a/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index 5fea9489291..e58df27644e 100644 --- a/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java @@ -19,9 +19,10 @@ package org.elasticsearch.transport; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskManager; -import java.lang.reflect.Constructor; -import java.util.concurrent.Callable; +import java.io.IOException; import java.util.function.Supplier; /** @@ -34,14 +35,16 @@ public class RequestHandlerRegistry<Request extends TransportRequest> { private final boolean forceExecution; private final String executor; private final Supplier<Request> requestFactory; + private final TaskManager taskManager; - public RequestHandlerRegistry(String action, Supplier<Request> requestFactory, TransportRequestHandler<Request> handler, String executor, boolean forceExecution) { + public RequestHandlerRegistry(String action, Supplier<Request> requestFactory, TaskManager taskManager, TransportRequestHandler<Request> handler, String executor, boolean forceExecution) { this.action = action; this.requestFactory = requestFactory; assert newRequest() != null; this.handler = handler; this.forceExecution = forceExecution; this.executor = executor; + this.taskManager = taskManager; } public String getAction() { @@ -52,8 +55,21 @@ public class RequestHandlerRegistry<Request extends TransportRequest> { return requestFactory.get(); } - public TransportRequestHandler<Request> getHandler() { - return handler; + public void processMessageReceived(Request request, TransportChannel channel) throws Exception { + final Task task = taskManager.register(channel.getChannelType(), action, request); + if (task == null) { + handler.messageReceived(request, channel); + } else { + boolean success = false; + try { + handler.messageReceived(request, new TransportChannelWrapper(taskManager, task, channel), task); + success = true; + } finally { + if (success == false) { + taskManager.unregister(task); + } + } + } } public boolean isForceExecution() { @@ -63,4 +79,44 @@ public class RequestHandlerRegistry<Request extends TransportRequest> { public String getExecutor() { return executor; } + + @Override + public String toString() { + return handler.toString(); + } + + private static class TransportChannelWrapper
extends DelegatingTransportChannel { + + private final Task task; + + private final TaskManager taskManager; + + public TransportChannelWrapper(TaskManager taskManager, Task task, TransportChannel channel) { + super(channel); + this.task = task; + this.taskManager = taskManager; + } + + @Override + public void sendResponse(TransportResponse response) throws IOException { + endTask(); + super.sendResponse(response); + } + + @Override + public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException { + endTask(); + super.sendResponse(response, options); + } + + @Override + public void sendResponse(Throwable error) throws IOException { + endTask(); + super.sendResponse(error); + } + + private void endTask() { + taskManager.unregister(task); + } + } } diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java index 10fa9b239dc..78b07e3aae3 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -21,6 +21,8 @@ package org.elasticsearch.transport; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.LifecycleComponent; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -34,9 +36,8 @@ import java.util.Map; public interface Transport extends LifecycleComponent { - public static class TransportSettings { - public static final String TRANSPORT_TCP_COMPRESS = "transport.tcp.compress"; - } + Setting TRANSPORT_PROFILES_SETTING = Setting.groupSetting("transport.profiles.", true, Setting.Scope.CLUSTER); + Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, false, Setting.Scope.CLUSTER); void transportServiceAdapter(TransportServiceAdapter service); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportChannel.java b/core/src/main/java/org/elasticsearch/transport/TransportChannel.java index 4c7678d60f0..53fd4ebe91e 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportChannel.java @@ -30,6 +30,10 @@ public interface TransportChannel { String getProfileName(); + long getRequestId(); + + String getChannelType(); + void sendResponse(TransportResponse response) throws IOException; void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java b/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java new file mode 100644 index 00000000000..8c042cd1937 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport; + +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.IOException; + +/** + * Base class for delegating transport response to a transport channel + */ +public abstract class TransportChannelResponseHandler<T extends TransportResponse> implements TransportResponseHandler<T> { + + /** + * Convenience method for delegating an empty response to the provided channel + */ + public static TransportChannelResponseHandler<TransportResponse.Empty> emptyResponseHandler(ESLogger logger, TransportChannel channel, String extraInfoOnError) { + return new TransportChannelResponseHandler<TransportResponse.Empty>(logger, channel, extraInfoOnError) { + @Override + public TransportResponse.Empty newInstance() { + return TransportResponse.Empty.INSTANCE; + } + }; + } + + private final ESLogger logger; + private final TransportChannel channel; + private final String extraInfoOnError; + + protected TransportChannelResponseHandler(ESLogger logger, TransportChannel channel, String extraInfoOnError) { + this.logger = logger; + this.channel = channel; + this.extraInfoOnError = extraInfoOnError; + } + + @Override + public void handleResponse(T response) { + try { + channel.sendResponse(response); + } catch (IOException e) { + handleException(new TransportException(e)); + } + } + + @Override + public void handleException(TransportException exp) { + try { + channel.sendResponse(exp); + } catch (IOException e) { + logger.debug("failed to send failure {}", e, extraInfoOnError == null ? "" : "(" + extraInfoOnError + ")"); + } + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } +} diff --git a/core/src/main/java/org/elasticsearch/transport/TransportFuture.java b/core/src/main/java/org/elasticsearch/transport/TransportFuture.java index c4bfcb7afea..5d34d0c0338 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportFuture.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportFuture.java @@ -19,8 +19,6 @@ package org.elasticsearch.transport; -import org.elasticsearch.ElasticsearchException; - import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportModule.java b/core/src/main/java/org/elasticsearch/transport/TransportModule.java deleted file mode 100644 index abf90deee81..00000000000 --- a/core/src/main/java/org/elasticsearch/transport/TransportModule.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.transport; - -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.transport.local.LocalTransport; -import org.elasticsearch.transport.netty.NettyTransport; - -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * - */ -public class TransportModule extends AbstractModule { - - public static final String TRANSPORT_TYPE_KEY = "transport.type"; - public static final String TRANSPORT_SERVICE_TYPE_KEY = "transport.service.type"; - - public static final String LOCAL_TRANSPORT = "local"; - public static final String NETTY_TRANSPORT = "netty"; - - private final ESLogger logger; - private final Settings settings; - - private final Map> transportServices = new HashMap<>(); - private final Map> transports = new HashMap<>(); - private Class configuredTransportService; - private Class configuredTransport; - private String configuredTransportServiceSource; - private String configuredTransportSource; - - public TransportModule(Settings settings) { - this.settings = settings; - this.logger = Loggers.getLogger(getClass(), settings); - addTransport(LOCAL_TRANSPORT, LocalTransport.class); - addTransport(NETTY_TRANSPORT, NettyTransport.class); - } - - public void addTransportService(String name, Class clazz) { - Class oldClazz = transportServices.put(name, clazz); - if (oldClazz != null) { - throw new IllegalArgumentException("Cannot register TransportService [" + name + "] to " + clazz.getName() + ", already registered to " + oldClazz.getName()); - } - } - - public void addTransport(String name, Class clazz) { - Class oldClazz = transports.put(name, clazz); - if (oldClazz != null) { - throw new IllegalArgumentException("Cannot register Transport [" + name + "] to " + clazz.getName() + ", already registered to " + oldClazz.getName()); - } - } - - @Override - protected void configure() { - if (configuredTransportService != null) { - logger.info("Using [{}] as transport service, overridden by [{}]", configuredTransportService.getName(), configuredTransportServiceSource); - bind(TransportService.class).to(configuredTransportService).asEagerSingleton(); - } else { - String typeName = settings.get(TRANSPORT_SERVICE_TYPE_KEY); - if (typeName == null) { - bind(TransportService.class).asEagerSingleton(); - } else { - if (transportServices.containsKey(typeName) == false) { - throw new IllegalArgumentException("Unknown TransportService type [" + typeName + "], known types are: " + transportServices.keySet()); - } - bind(TransportService.class).to(transportServices.get(typeName)).asEagerSingleton(); - } - } - - bind(NamedWriteableRegistry.class).asEagerSingleton(); - if (configuredTransport != null) { - logger.info("Using [{}] as transport, overridden by [{}]", configuredTransport.getName(), configuredTransportSource); - 
bind(Transport.class).to(configuredTransport).asEagerSingleton(); - } else { - String defaultType = DiscoveryNode.localNode(settings) ? LOCAL_TRANSPORT : NETTY_TRANSPORT; - String typeName = settings.get(TRANSPORT_TYPE_KEY, defaultType); - Class clazz = transports.get(typeName); - if (clazz == null) { - throw new IllegalArgumentException("Unknown Transport [" + typeName + "]"); - } - bind(Transport.class).to(clazz).asEagerSingleton(); - } - } - - public void setTransportService(Class transportService, String source) { - Objects.requireNonNull(transportService, "Configured transport service may not be null"); - Objects.requireNonNull(source, "Plugin, that changes transport service may not be null"); - this.configuredTransportService = transportService; - this.configuredTransportServiceSource = source; - } - - public void setTransport(Class transport, String source) { - Objects.requireNonNull(transport, "Configured transport may not be null"); - Objects.requireNonNull(source, "Plugin, that changes transport may not be null"); - this.configuredTransport = transport; - this.configuredTransportSource = source; - } -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java index ddf54179476..d5c1491f1a6 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java @@ -19,6 +19,8 @@ package org.elasticsearch.transport; +import org.elasticsearch.tasks.Task; + /** */ public abstract class TransportRequest extends TransportMessage { @@ -43,4 +45,14 @@ public abstract class TransportRequest extends TransportMessage { + /** + * Override this method if access to the Task parameter is needed + */ + default void messageReceived(final T request, final TransportChannel channel, Task task) throws Exception { + messageReceived(request, channel); + } + void messageReceived(T request, TransportChannel channel) throws Exception; } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 14fc9029b00..5d74c4a408f 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -29,6 +29,8 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -37,7 +39,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; -import org.elasticsearch.node.settings.NodeSettingsService; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -46,11 +48,11 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.Callable; import 
java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; @@ -65,6 +67,7 @@ public class TransportService extends AbstractLifecycleComponent requestHandlers = Collections.emptyMap(); final Object requestHandlerMutex = new Object(); @@ -88,14 +91,14 @@ public class TransportService extends AbstractLifecycleComponent> TRACE_LOG_INCLUDE_SETTING = Setting.listSetting("transport.tracer.include", Collections.emptyList(), Function.identity(), true, Setting.Scope.CLUSTER); + public static final Setting> TRACE_LOG_EXCLUDE_SETTING = Setting.listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME), Function.identity(), true, Setting.Scope.CLUSTER); + private final ESLogger tracerLog; volatile String[] tracerLogInclude; volatile String[] tracelLogExclude; - private final ApplySettings settingsListener = new ApplySettings(); /** if set will call requests sent to this id to shortcut and executed locally */ volatile DiscoveryNode localNode = null; @@ -109,10 +112,11 @@ public class TransportService extends AbstractLifecycleComponent tracerLogInclude) { + this.tracerLogInclude = tracerLogInclude.toArray(Strings.EMPTY_ARRAY); } - // used for testing - public void applySettings(Settings settings) { - settingsListener.onRefreshSettings(settings); + void setTracerLogExclude(List tracelLogExclude) { + this.tracelLogExclude = tracelLogExclude.toArray(Strings.EMPTY_ARRAY); } - @Override protected void doStart() { adapter.rxMetric.clear(); @@ -341,13 +333,13 @@ public class TransportService extends AbstractLifecycleComponent void registerRequestHandler(String action, Supplier requestFactory, String executor, TransportRequestHandler handler) { - RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, requestFactory, handler, executor, false); + RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, requestFactory, taskManager, handler, executor, false); registerRequestHandler(reg); } @@ -419,7 +411,7 @@ public class TransportService extends AbstractLifecycleComponent void registerRequestHandler(String action, Supplier request, String executor, boolean forceExecution, TransportRequestHandler handler) { - RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, request, handler, executor, forceExecution); + RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, request, taskManager, handler, executor, forceExecution); registerRequestHandler(reg); } @@ -428,7 +420,7 @@ public class TransportService extends AbstractLifecycleComponent implem request.readFrom(stream); if (ThreadPool.Names.SAME.equals(reg.getExecutor())) { //noinspection unchecked - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } else { threadPool.executor(reg.getExecutor()).execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { //noinspection unchecked - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } @Override diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java index 
b15add5445e..e1e85e9a12f 100644 --- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java @@ -21,7 +21,11 @@ package org.elasticsearch.transport.local; import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseOptions; +import org.elasticsearch.transport.TransportServiceAdapter; import org.elasticsearch.transport.support.TransportStatus; import java.io.IOException; @@ -102,6 +106,16 @@ public class LocalTransportChannel implements TransportChannel { sourceTransportServiceAdapter.onResponseSent(requestId, action, error); } + @Override + public long getRequestId() { + return requestId; + } + + @Override + public String getChannelType() { + return "local"; + } + private void writeResponseExceptionHeader(BytesStreamOutput stream) throws IOException { stream.writeLong(requestId); byte status = 0; diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index 3bf4fa6701d..8df17f73233 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -31,10 +31,24 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.ActionNotFoundTransportException; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.RequestHandlerRegistry; +import org.elasticsearch.transport.ResponseHandlerFailureTransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportSerializationException; +import org.elasticsearch.transport.TransportServiceAdapter; +import org.elasticsearch.transport.Transports; import org.elasticsearch.transport.support.TransportStatus; import org.jboss.netty.buffer.ChannelBuffer; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ExceptionEvent; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelUpstreamHandler; +import org.jboss.netty.channel.WriteCompletionEvent; import java.io.IOException; import java.net.InetSocketAddress; @@ -241,7 +255,7 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { request.readFrom(buffer); if (ThreadPool.Names.SAME.equals(reg.getExecutor())) { //noinspection unchecked - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } else { threadPool.executor(reg.getExecutor()).execute(new RequestHandler(reg, request, transportChannel)); } @@ -296,7 +310,7 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { 
@SuppressWarnings({"unchecked"}) @Override protected void doRun() throws Exception { - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } @Override diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index 2f1c52a0ac2..6a6a6c38011 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -225,7 +225,7 @@ public class NettyTransport extends AbstractLifecycleComponent implem this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", settings.getAsTime("transport.tcp.connect_timeout", settings.getAsTime(TCP_CONNECT_TIMEOUT, TCP_DEFAULT_CONNECT_TIMEOUT))); this.maxCumulationBufferCapacity = this.settings.getAsBytesSize("transport.netty.max_cumulation_buffer_capacity", null); this.maxCompositeBufferComponents = this.settings.getAsInt("transport.netty.max_composite_buffer_components", -1); - this.compress = settings.getAsBoolean(TransportSettings.TRANSPORT_TCP_COMPRESS, false); + this.compress = Transport.TRANSPORT_TCP_COMPRESS.get(settings); this.connectionsPerNodeRecovery = this.settings.getAsInt("transport.netty.connections_per_node.recovery", settings.getAsInt(CONNECTIONS_PER_NODE_RECOVERY, 2)); this.connectionsPerNodeBulk = this.settings.getAsInt("transport.netty.connections_per_node.bulk", settings.getAsInt(CONNECTIONS_PER_NODE_BULK, 3)); @@ -295,7 +295,7 @@ public class NettyTransport extends AbstractLifecycleComponent implem this.serverOpenChannels = openChannels; // extract default profile first and create standard bootstrap - Map profiles = settings.getGroups("transport.profiles", true); + Map profiles = TRANSPORT_PROFILES_SETTING.get(settings()).getAsGroups(true); if (!profiles.containsKey(DEFAULT_PROFILE)) { profiles = new HashMap<>(profiles); profiles.put(DEFAULT_PROFILE, Settings.EMPTY); diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java index fe3a941f665..aaf33c2fd5a 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java @@ -28,14 +28,17 @@ import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.netty.ReleaseChannelFutureListener; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseOptions; +import org.elasticsearch.transport.TransportServiceAdapter; import org.elasticsearch.transport.support.TransportStatus; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import java.io.IOException; -import java.io.NotSerializableException; /** * @@ -129,6 +132,16 @@ public class NettyTransportChannel implements TransportChannel { transportServiceAdapter.onResponseSent(requestId, action, error); } + @Override + public long getRequestId() { + return requestId; + } + + @Override + public String getChannelType() 
{ + return "netty"; + } + /** * Returns the underlying netty channel. This method is intended be used for access to netty to get additional * details when processing the request and may be used by plugins. Responses should be sent using the methods diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index f577415ee6b..78453c9eac6 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -26,7 +26,8 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -36,6 +37,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -205,142 +207,180 @@ public class TribeService extends AbstractLifecycleComponent { } } - class TribeClusterStateListener implements ClusterStateListener { + class TribeClusterStateListener implements ClusterStateListener { private final String tribeName; + private final TribeNodeClusterStateTaskExecutor executor; TribeClusterStateListener(Node tribeNode) { - this.tribeName = tribeNode.settings().get(TRIBE_NAME); + String tribeName = tribeNode.settings().get(TRIBE_NAME); + this.tribeName = tribeName; + executor = new TribeNodeClusterStateTaskExecutor(tribeName); } @Override public void clusterChanged(final ClusterChangedEvent event) { logger.debug("[{}] received cluster event, [{}]", tribeName, event.source()); - clusterService.submitStateUpdateTask("cluster event from " + tribeName + ", " + event.source(), new ClusterStateUpdateTask() { - @Override - public boolean runOnlyOnMaster() { - return false; + clusterService.submitStateUpdateTask( + "cluster event from " + tribeName + ", " + event.source(), + event, + ClusterStateTaskConfig.build(Priority.NORMAL), + executor, + (source, t) -> logger.warn("failed to process [{}]", t, source)); + } + } + + class TribeNodeClusterStateTaskExecutor implements ClusterStateTaskExecutor { + private final String tribeName; + + TribeNodeClusterStateTaskExecutor(String tribeName) { + this.tribeName = tribeName; + } + + + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterState accumulator = ClusterState.builder(currentState).build(); + BatchResult.Builder builder = BatchResult.builder(); + + try { + // we only need to apply the latest cluster state update + accumulator = applyUpdate(accumulator, tasks.get(tasks.size() - 1)); + builder.successes(tasks); + } catch (Throwable t) { + builder.failures(tasks, t); + } + + return builder.build(accumulator); + } + + private ClusterState 
applyUpdate(ClusterState currentState, ClusterChangedEvent task) { + boolean clusterStateChanged = false; + ClusterState tribeState = task.state(); + DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(currentState.nodes()); + // -- merge nodes + // go over existing nodes, and see if they need to be removed + for (DiscoveryNode discoNode : currentState.nodes()) { + String markedTribeName = discoNode.attributes().get(TRIBE_NAME); + if (markedTribeName != null && markedTribeName.equals(tribeName)) { + if (tribeState.nodes().get(discoNode.id()) == null) { + clusterStateChanged = true; + logger.info("[{}] removing node [{}]", tribeName, discoNode); + nodes.remove(discoNode.id()); + } } + } + // go over tribe nodes, and see if they need to be added + for (DiscoveryNode tribe : tribeState.nodes()) { + if (currentState.nodes().get(tribe.id()) == null) { + // a new node, add it, but also add the tribe name to the attributes + Map tribeAttr = new HashMap<>(); + for (ObjectObjectCursor attr : tribe.attributes()) { + tribeAttr.put(attr.key, attr.value); + } + tribeAttr.put(TRIBE_NAME, tribeName); + DiscoveryNode discoNode = new DiscoveryNode(tribe.name(), tribe.id(), tribe.getHostName(), tribe.getHostAddress(), tribe.address(), unmodifiableMap(tribeAttr), tribe.version()); + clusterStateChanged = true; + logger.info("[{}] adding node [{}]", tribeName, discoNode); + nodes.put(discoNode); + } + } - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - ClusterState tribeState = event.state(); - DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(currentState.nodes()); - // -- merge nodes - // go over existing nodes, and see if they need to be removed - for (DiscoveryNode discoNode : currentState.nodes()) { - String markedTribeName = discoNode.attributes().get(TRIBE_NAME); - if (markedTribeName != null && markedTribeName.equals(tribeName)) { - if (tribeState.nodes().get(discoNode.id()) == null) { - logger.info("[{}] removing node [{}]", tribeName, discoNode); - nodes.remove(discoNode.id()); - } - } + // -- merge metadata + ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + MetaData.Builder metaData = MetaData.builder(currentState.metaData()); + RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + // go over existing indices, and see if they need to be removed + for (IndexMetaData index : currentState.metaData()) { + String markedTribeName = index.getSettings().get(TRIBE_NAME); + if (markedTribeName != null && markedTribeName.equals(tribeName)) { + IndexMetaData tribeIndex = tribeState.metaData().index(index.getIndex()); + clusterStateChanged = true; + if (tribeIndex == null || tribeIndex.getState() == IndexMetaData.State.CLOSE) { + logger.info("[{}] removing index [{}]", tribeName, index.getIndex()); + removeIndex(blocks, metaData, routingTable, index); + } else { + // always make sure to update the metadata and routing table, in case + // there are changes in them (new mapping, shards moving from initializing to started) + routingTable.add(tribeState.routingTable().index(index.getIndex())); + Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); + metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); } - // go over tribe nodes, and see if they need to be added - for (DiscoveryNode tribe : tribeState.nodes()) { - if (currentState.nodes().get(tribe.id()) == null) { - // a new node, add it, but also add the tribe name to 
the attributes - Map tribeAttr = new HashMap<>(); - for (ObjectObjectCursor attr : tribe.attributes()) { - tribeAttr.put(attr.key, attr.value); - } - tribeAttr.put(TRIBE_NAME, tribeName); - DiscoveryNode discoNode = new DiscoveryNode(tribe.name(), tribe.id(), tribe.getHostName(), tribe.getHostAddress(), tribe.address(), unmodifiableMap(tribeAttr), tribe.version()); - logger.info("[{}] adding node [{}]", tribeName, discoNode); - nodes.put(discoNode); - } + } + } + // go over tribe one, and see if they need to be added + for (IndexMetaData tribeIndex : tribeState.metaData()) { + // if there is no routing table yet, do nothing with it... + IndexRoutingTable table = tribeState.routingTable().index(tribeIndex.getIndex()); + if (table == null) { + continue; + } + final IndexMetaData indexMetaData = currentState.metaData().index(tribeIndex.getIndex()); + if (indexMetaData == null) { + if (!droppedIndices.contains(tribeIndex.getIndex())) { + // a new index, add it, and add the tribe name as a setting + clusterStateChanged = true; + logger.info("[{}] adding index [{}]", tribeName, tribeIndex.getIndex()); + addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex); } - - // -- merge metadata - ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - MetaData.Builder metaData = MetaData.builder(currentState.metaData()); - RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); - // go over existing indices, and see if they need to be removed - for (IndexMetaData index : currentState.metaData()) { - String markedTribeName = index.getSettings().get(TRIBE_NAME); - if (markedTribeName != null && markedTribeName.equals(tribeName)) { - IndexMetaData tribeIndex = tribeState.metaData().index(index.getIndex()); - if (tribeIndex == null || tribeIndex.getState() == IndexMetaData.State.CLOSE) { - logger.info("[{}] removing index [{}]", tribeName, index.getIndex()); - removeIndex(blocks, metaData, routingTable, index); - } else { - // always make sure to update the metadata and routing table, in case - // there are changes in them (new mapping, shards moving from initializing to started) - routingTable.add(tribeState.routingTable().index(index.getIndex())); - Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); - metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); - } - } - } - // go over tribe one, and see if they need to be added - for (IndexMetaData tribeIndex : tribeState.metaData()) { - // if there is no routing table yet, do nothing with it... - IndexRoutingTable table = tribeState.routingTable().index(tribeIndex.getIndex()); - if (table == null) { - continue; - } - final IndexMetaData indexMetaData = currentState.metaData().index(tribeIndex.getIndex()); - if (indexMetaData == null) { - if (!droppedIndices.contains(tribeIndex.getIndex())) { - // a new index, add it, and add the tribe name as a setting - logger.info("[{}] adding index [{}]", tribeName, tribeIndex.getIndex()); + } else { + String existingFromTribe = indexMetaData.getSettings().get(TRIBE_NAME); + if (!tribeName.equals(existingFromTribe)) { + // we have a potential conflict on index names, decide what to do... 
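For context on the branch that follows: it implements the tribe node's index-name conflict policy. A minimal sketch of how that policy is configured, assuming the setting key "tribe.on_conflict" and the values "any", "drop", and "prefer_<tribe>" that the ON_CONFLICT_* constants checked below correspond to (the tribe names t1 and t2 are hypothetical):

    // Illustrative node settings for a tribe node joining two remote clusters.
    // On an index-name clash, "prefer_t1" keeps t1's copy of the index; the
    // alternatives are "any" (keep whichever was seen first) and "drop".
    Settings tribeNodeSettings = Settings.builder()
        .put("tribe.t1.cluster.name", "cluster-one")   // hypothetical tribe "t1"
        .put("tribe.t2.cluster.name", "cluster-two")   // hypothetical tribe "t2"
        .put("tribe.on_conflict", "prefer_t1")
        .build();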
+ if (ON_CONFLICT_ANY.equals(onConflict)) { + // we chose any tribe, carry on + } else if (ON_CONFLICT_DROP.equals(onConflict)) { + // drop the indices, there is a conflict + clusterStateChanged = true; + logger.info("[{}] dropping index [{}] due to conflict with [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); + removeIndex(blocks, metaData, routingTable, tribeIndex); + droppedIndices.add(tribeIndex.getIndex()); + } else if (onConflict.startsWith(ON_CONFLICT_PREFER)) { + // on conflict, prefer a tribe... + String preferredTribeName = onConflict.substring(ON_CONFLICT_PREFER.length()); + if (tribeName.equals(preferredTribeName)) { + // the new one is the preferred one, replace... + clusterStateChanged = true; + logger.info("[{}] adding index [{}], preferred over [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); + removeIndex(blocks, metaData, routingTable, tribeIndex); + addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex); - } - } else { - String existingFromTribe = indexMetaData.getSettings().get(TRIBE_NAME); - if (!tribeName.equals(existingFromTribe)) { - // we have a potential conflict on index names, decide what to do... - if (ON_CONFLICT_ANY.equals(onConflict)) { - // we chose any tribe, carry on - } else if (ON_CONFLICT_DROP.equals(onConflict)) { - // drop the indices, there is a conflict - logger.info("[{}] dropping index [{}] due to conflict with [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); - removeIndex(blocks, metaData, routingTable, tribeIndex); - droppedIndices.add(tribeIndex.getIndex()); - } else if (onConflict.startsWith(ON_CONFLICT_PREFER)) { - // on conflict, prefer a tribe... - String preferredTribeName = onConflict.substring(ON_CONFLICT_PREFER.length()); - if (tribeName.equals(preferredTribeName)) { - // the new one is the preferred one, replace...
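Stepping back from the interleaved hunks: the heart of this TribeService change is that tribe cluster events are no longer applied through one anonymous ClusterStateUpdateTask per event. They are now queued and handed to TribeNodeClusterStateTaskExecutor in batches; since each ClusterChangedEvent carries a complete remote cluster state, only the newest one needs to be applied, and the new clusterStateChanged flag lets applyUpdate hand back the input state untouched so that no-op events do not trigger a republish. The generic type parameters were lost in the rendering above; restoring them, the executor has roughly this shape (a sketch mirroring TribeNodeClusterStateTaskExecutor, not a verbatim copy of the diff):

    class LatestEventOnlyExecutor implements ClusterStateTaskExecutor<ClusterChangedEvent> {
        @Override
        public boolean runOnlyOnMaster() {
            return false; // as in the diff: tribe updates run on the tribe node itself
        }

        @Override
        public BatchResult<ClusterChangedEvent> execute(ClusterState currentState,
                List<ClusterChangedEvent> tasks) throws Exception {
            BatchResult.Builder<ClusterChangedEvent> builder = BatchResult.builder();
            ClusterState result = currentState;
            try {
                // the newest queued event subsumes all earlier ones
                result = applyUpdate(currentState, tasks.get(tasks.size() - 1));
                builder.successes(tasks);
            } catch (Throwable t) {
                builder.failures(tasks, t);
            }
            return builder.build(result);
        }
    }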
- logger.info("[{}] adding index [{}], preferred over [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); - removeIndex(blocks, metaData, routingTable, tribeIndex); - addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex); - } // else: either the existing one is the preferred one, or we haven't seen one, carry on - } - } + } // else: either the existing one is the preferred one, or we haven't seen one, carry on } } - - return ClusterState.builder(currentState).incrementVersion().blocks(blocks).nodes(nodes).metaData(metaData).routingTable(routingTable.build()).build(); } + } - private void removeIndex(ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData index) { - metaData.remove(index.getIndex()); - routingTable.remove(index.getIndex()); - blocks.removeIndexBlocks(index.getIndex()); - } + if (!clusterStateChanged) { + return currentState; + } else { + return ClusterState.builder(currentState).incrementVersion().blocks(blocks).nodes(nodes).metaData(metaData).routingTable(routingTable.build()).build(); + } + } - private void addNewIndex(ClusterState tribeState, ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData tribeIndex) { - Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); - metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); - routingTable.add(tribeState.routingTable().index(tribeIndex.getIndex())); - if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); - } - if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_READ_BLOCK); - } - if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); - } - } + private void removeIndex(ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData index) { + metaData.remove(index.getIndex()); + routingTable.remove(index.getIndex()); + blocks.removeIndexBlocks(index.getIndex()); + } - @Override - public void onFailure(String source, Throwable t) { - logger.warn("failed to process [{}]", t, source); - } - }); + private void addNewIndex(ClusterState tribeState, ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData tribeIndex) { + Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); + metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); + routingTable.add(tribeState.routingTable().index(tribeIndex.getIndex())); + if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex())) { + blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); + } + if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex())) { + blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_READ_BLOCK); + } + if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex())) { + blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); + } } } } diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 26785010110..151c91f5be2 100644 --- 
a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. -grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1719088.jar}" { +grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1721183.jar}" { // needed to allow MMapDirectory's "unmap hack" permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index b5f9c24d04f..419c666d55e 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1719088.jar}" { +grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1721183.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy index 8e7ca8d8b6e..8078516c7d5 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy @@ -26,10 +26,6 @@ grant { // groovy IndyInterface bootstrap requires this property for indy logging permission java.util.PropertyPermission "groovy.indy.logging", "read"; - // groovy JsonOutput, just allow it to read these props so it works (unsafe is not allowed) - permission java.util.PropertyPermission "groovy.json.faststringutils.disable", "read"; - permission java.util.PropertyPermission "groovy.json.faststringutils.write.to.final.fields", "read"; - // needed by Rhino engine exception handling permission java.util.PropertyPermission "rhino.stack.style", "read"; diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index a287ec119e7..725b1bd4400 100644 --- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -23,17 +23,32 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.*; -import org.apache.lucene.search.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.MultiReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopDocs; 
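A note on the policy files a few hunks back: each dangerous permission is granted to exactly one jar through a ${codebase.<jar-file>} key, which is why a routine Lucene snapshot bump (1719088 to 1721183) has to edit the policy at all; the jar's exact file name is part of the grant. At runtime, code in the granted jar passes a standard security-manager check along these lines (an illustrative fragment using plain JDK APIs, not code from this change):

    // What the lucene-core grant above unlocks: the reflective access needed
    // by MMapDirectory's "unmap hack". Code loaded from any other code base
    // fails this check with a SecurityException.
    SecurityManager sm = System.getSecurityManager();
    if (sm != null) {
        sm.checkPermission(new java.lang.reflect.ReflectPermission("suppressAccessChecks"));
    }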
import org.apache.lucene.search.similarities.BM25Similarity; -import org.apache.lucene.search.similarities.DefaultSimilarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; import org.apache.lucene.util.TestUtil; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -199,7 +214,7 @@ public class BlendedTermQueryTests extends ESTestCase { } public IndexSearcher setSimilarity(IndexSearcher searcher) { - Similarity similarity = random().nextBoolean() ? new BM25Similarity() : new DefaultSimilarity(); + Similarity similarity = random().nextBoolean() ? new BM25Similarity() : new ClassicSimilarity(); searcher.setSimilarity(similarity); return searcher; } diff --git a/core/src/test/java/org/elasticsearch/ESExceptionTests.java b/core/src/test/java/org/elasticsearch/ESExceptionTests.java index 91be1f339dd..a547e18de52 100644 --- a/core/src/test/java/org/elasticsearch/ESExceptionTests.java +++ b/core/src/test/java/org/elasticsearch/ESExceptionTests.java @@ -24,7 +24,6 @@ import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.LockObtainFailedException; -import org.apache.lucene.util.Constants; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.ParsingException; diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 46cdea3dadf..975de9e8f0e 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -18,9 +18,6 @@ */ package org.elasticsearch; -import com.fasterxml.jackson.core.JsonLocation; -import com.fasterxml.jackson.core.JsonParseException; -import org.apache.lucene.util.Constants; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.TimestampParsingException; @@ -30,7 +27,12 @@ import org.elasticsearch.client.AbstractClientHeadersTestCase; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IllegalShardRoutingStateException; +import org.elasticsearch.cluster.routing.RoutingTableValidation; +import org.elasticsearch.cluster.routing.RoutingValidationException; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.io.PathUtils; @@ -82,11 +84,20 @@ import org.elasticsearch.transport.ConnectTransportException; import java.io.IOException; import java.lang.reflect.Modifier; import java.net.URISyntaxException; 
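On the unknown-throwable hunk that follows: the list of "unknown" examples is updated because this change gives IOException (and the java.nio.file exceptions) native transport serialization, as the testIOException and testFileSystemExceptions methods added further down verify. Types without their own serialization, such as a plain Exception, ClassCastException, or UnsatisfiedLinkError, still round-trip through the wrapper. A sketch of the behavior those assertions pin down (serialize is this test class's own helper):

    // A throwable the transport cannot serialize natively comes back as a
    // NotSerializableExceptionWrapper rather than as its original class,
    // but the stack trace and any suppressed exceptions are preserved.
    Throwable original = new UnsatisfiedLinkError("booom");
    original.addSuppressed(new NullPointerException());
    Throwable deserialized = serialize(original);
    // deserialized is a NotSerializableExceptionWrapper whose stack trace
    // and suppressed array mirror the original's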
+import java.nio.file.AccessDeniedException; +import java.nio.file.AtomicMoveNotSupportedException; +import java.nio.file.DirectoryNotEmptyException; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.FileSystemException; +import java.nio.file.FileSystemLoopException; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.NotDirectoryException; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -544,17 +555,17 @@ public class ExceptionSerializationTests extends ESTestCase { assertEquals("{\"type\":\"illegal_argument_exception\",\"reason\":\"nono!\"}", toXContent(ex)); Throwable[] unknowns = new Throwable[]{ - new JsonParseException("foobar", new JsonLocation(new Object(), 1, 2, 3, 4)), + new Exception("foobar"), new ClassCastException("boom boom boom"), - new IOException("booom") + new UnsatisfiedLinkError("booom") }; for (Throwable t : unknowns) { if (randomBoolean()) { - t.addSuppressed(new IOException("suppressed")); + t.addSuppressed(new UnsatisfiedLinkError("suppressed")); t.addSuppressed(new NullPointerException()); } Throwable deserialized = serialize(t); - assertTrue(deserialized instanceof NotSerializableExceptionWrapper); + assertTrue(deserialized.getClass().toString(), deserialized instanceof NotSerializableExceptionWrapper); assertArrayEquals(t.getStackTrace(), deserialized.getStackTrace()); assertEquals(t.getSuppressed().length, deserialized.getSuppressed().length); if (t.getSuppressed().length > 0) { @@ -791,4 +802,36 @@ public class ExceptionSerializationTests extends ESTestCase { } } } + + public void testIOException() throws IOException { + IOException serialize = serialize(new IOException("boom", new NullPointerException())); + assertEquals("boom", serialize.getMessage()); + assertTrue(serialize.getCause() instanceof NullPointerException); + } + + + public void testFileSystemExceptions() throws IOException { + for (FileSystemException ex : Arrays.asList(new FileSystemException("a", "b", "c"), + new NoSuchFileException("a", "b", "c"), + new NotDirectoryException("a"), + new DirectoryNotEmptyException("a"), + new AtomicMoveNotSupportedException("a", "b", "c"), + new FileAlreadyExistsException("a", "b", "c"), + new AccessDeniedException("a", "b", "c"), + new FileSystemLoopException("a"))) { + + FileSystemException serialize = serialize(ex); + assertEquals(serialize.getClass(), ex.getClass()); + assertEquals("a", serialize.getFile()); + if (serialize.getClass() == NotDirectoryException.class || + serialize.getClass() == FileSystemLoopException.class || + serialize.getClass() == DirectoryNotEmptyException.class) { + assertNull(serialize.getOtherFile()); + assertNull(serialize.getReason()); + } else { + assertEquals(serialize.getClass().toString(), "b", serialize.getOtherFile()); + assertEquals(serialize.getClass().toString(), "c", serialize.getReason()); + } + } + } } diff --git a/core/src/test/java/org/elasticsearch/NamingConventionTests.java b/core/src/test/java/org/elasticsearch/NamingConventionTests.java index 912f8922b07..41d67b88390 100644 --- a/core/src/test/java/org/elasticsearch/NamingConventionTests.java +++ b/core/src/test/java/org/elasticsearch/NamingConventionTests.java @@ -19,7 +19,6 @@ package org.elasticsearch; import junit.framework.TestCase; - import org.apache.lucene.util.LuceneTestCase; import 
org.elasticsearch.common.io.PathUtils; import org.elasticsearch.test.ESIntegTestCase; diff --git a/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java b/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java index a6217d7ea64..6c11bc35dec 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.action.admin; +import org.apache.lucene.util.Constants; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder; @@ -40,6 +41,7 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.lessThan; public class HotThreadsIT extends ESIntegTestCase { + public void testHotThreadsDontFail() throws ExecutionException, InterruptedException { /** * This test just checks if nothing crashes or gets stuck etc. @@ -125,6 +127,7 @@ public class HotThreadsIT extends ESIntegTestCase { } public void testIgnoreIdleThreads() throws ExecutionException, InterruptedException { + assumeTrue("no support for hot_threads on FreeBSD", Constants.FREE_BSD == false); // First time, don't ignore idle threads: NodesHotThreadsRequestBuilder builder = client().admin().cluster().prepareNodesHotThreads(); @@ -158,12 +161,19 @@ public class HotThreadsIT extends ESIntegTestCase { NodesHotThreadsResponse response = client().admin().cluster().prepareNodesHotThreads().execute().get(); - for (NodeHotThreads node : response.getNodesMap().values()) { - String result = node.getHotThreads(); - assertTrue(result.indexOf("Hot threads at") != -1); - assertTrue(result.indexOf("interval=500ms") != -1); - assertTrue(result.indexOf("busiestThreads=3") != -1); - assertTrue(result.indexOf("ignoreIdleThreads=true") != -1); + if (Constants.FREE_BSD) { + for (NodeHotThreads node : response.getNodesMap().values()) { + String result = node.getHotThreads(); + assertTrue(result.indexOf("hot_threads is not supported") != -1); + } + } else { + for (NodeHotThreads node : response.getNodesMap().values()) { + String result = node.getHotThreads(); + assertTrue(result.indexOf("Hot threads at") != -1); + assertTrue(result.indexOf("interval=500ms") != -1); + assertTrue(result.indexOf("busiestThreads=3") != -1); + assertTrue(result.indexOf("ignoreIdleThreads=true") != -1); + } } } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java index a4d089c0f82..e66036cd0d1 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java @@ -33,7 +33,9 @@ import org.hamcrest.Matchers; import java.io.IOException; import static org.hamcrest.CoreMatchers.allOf; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ClusterHealthResponsesTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java new file mode 100644 
index 00000000000..4228c9fa699 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.admin.cluster.node.tasks; + +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.test.ESIntegTestCase; + +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +/** + * Integration tests for task management API + */ +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) +public class TasksIT extends ESIntegTestCase { + + public void testTaskCounts() { + // Run only on data nodes + ListTasksResponse response = client().admin().cluster().prepareListTasks("data:true").setActions(ListTasksAction.NAME + "[n]").get(); + assertThat(response.getTasks().size(), greaterThanOrEqualTo(cluster().numDataNodes())); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java new file mode 100644 index 00000000000..55c10aa298e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -0,0 +1,664 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.admin.cluster.node.tasks; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.ChildTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.TestClusterService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.local.LocalTransport; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReferenceArray; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.not; + +public class TransportTasksActionTests extends ESTestCase { + + private static ThreadPool threadPool; + private static final ClusterName clusterName = new ClusterName("test-cluster"); + private TestNode[] testNodes; + private int nodesCount; + + @BeforeClass + public static void beforeClass() { + threadPool = new ThreadPool(TransportTasksActionTests.class.getSimpleName()); + } + + @AfterClass + public static void afterClass() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + } + + @Before + public final void setupTestNodes() throws Exception { + nodesCount = randomIntBetween(2, 10); + testNodes = new TestNode[nodesCount]; + for (int i = 0; i < testNodes.length; i++) { + testNodes[i] = new TestNode("node" + i, threadPool, Settings.EMPTY); + } + 
} + + @After + public final void shutdownTestNodes() throws Exception { + for (TestNode testNode : testNodes) { + testNode.close(); + } + } + + private static class TestNode implements Releasable { + public TestNode(String name, ThreadPool threadPool, Settings settings) { + clusterService = new TestClusterService(threadPool); + transportService = new TransportService(Settings.EMPTY, + new LocalTransport(Settings.EMPTY, threadPool, Version.CURRENT, new NamedWriteableRegistry()), + threadPool); + transportService.start(); + discoveryNode = new DiscoveryNode(name, transportService.boundAddress().publishAddress(), Version.CURRENT); + transportListTasksAction = new TransportListTasksAction(settings, clusterName, threadPool, clusterService, transportService, + new ActionFilters(Collections.emptySet()), new IndexNameExpressionResolver(settings)); + } + + public final TestClusterService clusterService; + public final TransportService transportService; + public final DiscoveryNode discoveryNode; + public final TransportListTasksAction transportListTasksAction; + + @Override + public void close() { + transportService.close(); + } + } + + public static void connectNodes(TestNode... nodes) { + DiscoveryNode[] discoveryNodes = new DiscoveryNode[nodes.length]; + for (int i = 0; i < nodes.length; i++) { + discoveryNodes[i] = nodes[i].discoveryNode; + } + DiscoveryNode master = discoveryNodes[0]; + for (TestNode node : nodes) { + node.clusterService.setState(ClusterStateCreationUtils.state(node.discoveryNode, master, discoveryNodes)); + } + for (TestNode nodeA : nodes) { + for (TestNode nodeB : nodes) { + nodeA.transportService.connectToNode(nodeB.discoveryNode); + } + } + } + + public static class NodeRequest extends BaseNodeRequest { + protected String requestName; + private boolean enableTaskManager; + + public NodeRequest() { + super(); + } + + public NodeRequest(NodesRequest request, String nodeId) { + super(request, nodeId); + requestName = request.requestName; + enableTaskManager = request.enableTaskManager; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + requestName = in.readString(); + enableTaskManager = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(requestName); + out.writeBoolean(enableTaskManager); + } + + @Override + public String getDescription() { + return "NodeRequest[" + requestName + ", " + enableTaskManager + "]"; + } + + @Override + public Task createTask(long id, String type, String action) { + if (enableTaskManager) { + return super.createTask(id, type, action); + } else { + return null; + } + } + } + + public static class NodesRequest extends BaseNodesRequest { + private String requestName; + private boolean enableTaskManager; + + private NodesRequest() { + super(); + } + + public NodesRequest(String requestName, String... nodesIds) { + this(requestName, true, nodesIds); + } + + public NodesRequest(String requestName, boolean enableTaskManager, String... 
nodesIds) { + super(nodesIds); + this.requestName = requestName; + this.enableTaskManager = enableTaskManager; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + requestName = in.readString(); + enableTaskManager = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(requestName); + out.writeBoolean(enableTaskManager); + } + + @Override + public String getDescription() { + return "NodesRequest[" + requestName + ", " + enableTaskManager + "]"; + } + + @Override + public Task createTask(long id, String type, String action) { + if (enableTaskManager) { + return super.createTask(id, type, action); + } else { + return null; + } + } + } + + static class NodeResponse extends BaseNodeResponse { + + protected NodeResponse() { + super(); + } + + protected NodeResponse(DiscoveryNode node) { + super(node); + } + } + + static class NodesResponse extends BaseNodesResponse { + + private int failureCount; + + protected NodesResponse(ClusterName clusterName, NodeResponse[] nodes, int failureCount) { + super(clusterName, nodes); + this.failureCount = failureCount; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + failureCount = in.readVInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(failureCount); + } + + public int failureCount() { + return failureCount; + } + } + + /** + * Simulates node-based task that can be used to block node tasks so they are guaranteed to be registered by task manager + */ + abstract class TestNodesAction extends TransportNodesAction { + + TestNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, + ClusterService clusterService, TransportService transportService) { + super(settings, actionName, clusterName, threadPool, clusterService, transportService, + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC); + } + + @Override + protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray responses) { + final List nodesList = new ArrayList<>(); + int failureCount = 0; + for (int i = 0; i < responses.length(); i++) { + Object resp = responses.get(i); + if (resp instanceof NodeResponse) { // will also filter out null response for unallocated ones + nodesList.add((NodeResponse) resp); + } else if (resp instanceof FailedNodeException) { + failureCount++; + } else { + logger.warn("unknown response type [{}], expected NodeLocalGatewayMetaState or FailedNodeException", resp); + } + } + return new NodesResponse(clusterName, nodesList.toArray(new NodeResponse[nodesList.size()]), failureCount); + } + + @Override + protected NodeRequest newNodeRequest(String nodeId, NodesRequest request) { + return new NodeRequest(request, nodeId); + } + + @Override + protected NodeResponse newNodeResponse() { + return new NodeResponse(); + } + + @Override + protected abstract NodeResponse nodeOperation(NodeRequest request); + + @Override + protected boolean accumulateExceptions() { + return true; + } + } + + static class TestTaskResponse implements Writeable { + + private final String status; + + public TestTaskResponse(StreamInput in) throws IOException { + status = in.readString(); + } + + public TestTaskResponse(String status) { + this.status = status; + } + + public String getStatus() { + 
return status; + } + + @Override + public TestTaskResponse readFrom(StreamInput in) throws IOException { + return new TestTaskResponse(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(status); + } + } + + + static class TestTasksRequest extends BaseTasksRequest { + + } + + static class TestTasksResponse extends BaseTasksResponse { + + private List tasks; + + public TestTasksResponse() { + + } + + public TestTasksResponse(List tasks, List taskFailures, List nodeFailures) { + super(taskFailures, nodeFailures); + this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks)); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + int taskCount = in.readVInt(); + List builder = new ArrayList<>(); + for (int i = 0; i < taskCount; i++) { + builder.add(new TestTaskResponse(in)); + } + tasks = Collections.unmodifiableList(builder); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(tasks.size()); + for (TestTaskResponse task : tasks) { + task.writeTo(out); + } + } + } + + /** + * Test class for testing task operations + */ + static abstract class TestTasksAction extends TransportTasksAction { + + protected TestTasksAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService) { + super(settings, actionName, clusterName, threadPool, clusterService, transportService, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); + } + + @Override + protected TestTasksResponse newResponse(TestTasksRequest request, List tasks, List taskOperationFailures, List failedNodeExceptions) { + return new TestTasksResponse(tasks, taskOperationFailures, failedNodeExceptions); + } + + @Override + protected TestTaskResponse readTaskResponse(StreamInput in) throws IOException { + return new TestTaskResponse(in); + } + + @Override + protected boolean accumulateExceptions() { + return true; + } + } + + private ActionFuture startBlockingTestNodesAction(CountDownLatch checkLatch) throws InterruptedException { + return startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request")); + } + + private ActionFuture startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request) throws InterruptedException { + CountDownLatch actionLatch = new CountDownLatch(nodesCount); + TestNodesAction[] actions = new TestNodesAction[nodesCount]; + for (int i = 0; i < testNodes.length; i++) { + final int node = i; + actions[i] = new TestNodesAction(Settings.EMPTY, "testAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { + @Override + protected NodeResponse nodeOperation(NodeRequest request) { + logger.info("Action on node " + node); + actionLatch.countDown(); + try { + checkLatch.await(); + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + } + logger.info("Action on node " + node + " finished"); + return new NodeResponse(testNodes[node].discoveryNode); + } + }; + } + // Make sure no tasks are running + for (TestNode node : testNodes) { + assertEquals(0, node.transportService.getTaskManager().getTasks().size()); + } + ActionFuture future = actions[0].execute(request); + logger.info("Awaiting for all actions to start"); + actionLatch.await(); + 
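This blocking-action helper is the backbone of the tests that follow: node-level child tasks (their action names carry the "[n]" suffix, e.g. testAction[n]) must be registered with each node's TaskManager before the test may assert anything about them, and must stay alive until it has finished. That is arranged with a two-latch handshake, summarized here with the names used in this class:

    // actionLatch: counted down once per node as its nodeOperation starts,
    //              so actionLatch.await() means "every task is registered".
    // checkLatch:  held by the test; each nodeOperation blocks on it, keeping
    //              its task listable until checkLatch.countDown() frees them.
    CountDownLatch actionLatch = new CountDownLatch(nodesCount);
    CountDownLatch checkLatch = new CountDownLatch(1);
    // in nodeOperation(...): actionLatch.countDown(); then checkLatch.await();
    // in the test: actionLatch.await(); ...assertions...; checkLatch.countDown();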
logger.info("Done waiting for all actions to start"); + return future; + } + + public void testRunningTasksCount() throws Exception { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + ActionFuture future = startBlockingTestNodesAction(checkLatch); + + // Check task counts using taskManager + Map localTasks = testNodes[0].transportService.getTaskManager().getTasks(); + assertEquals(2, localTasks.size()); // all node tasks + 1 coordinating task + Task coordinatingTask = localTasks.get(Collections.min(localTasks.keySet())); + Task subTask = localTasks.get(Collections.max(localTasks.keySet())); + assertThat(subTask.getAction(), endsWith("[n]")); + assertThat(coordinatingTask.getAction(), not(endsWith("[n]"))); + for (int i = 1; i < testNodes.length; i++) { + Map remoteTasks = testNodes[i].transportService.getTaskManager().getTasks(); + assertEquals(1, remoteTasks.size()); + Task remoteTask = remoteTasks.values().iterator().next(); + assertThat(remoteTask.getAction(), endsWith("[n]")); + } + + // Check task counts using transport + int testNodeNum = randomIntBetween(0, testNodes.length - 1); + TestNode testNode = testNodes[testNodeNum]; + ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction*"); // pick all test actions + logger.info("Listing currently running tasks using node [{}]", testNodeNum); + ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + logger.info("Checking currently running tasks"); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + + // Coordinating node + assertEquals(2, response.getPerNodeTasks().get(testNodes[0].discoveryNode).size()); + // Other nodes node + for (int i = 1; i < testNodes.length; i++) { + assertEquals(1, response.getPerNodeTasks().get(testNodes[i].discoveryNode).size()); + } + + // Check task counts using transport with filtering + testNode = testNodes[randomIntBetween(0, testNodes.length - 1)]; + listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction[n]"); // only pick node actions + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { + assertEquals(1, entry.getValue().size()); + assertNull(entry.getValue().get(0).getDescription()); + } + + // Check task counts using transport with detailed description + listTasksRequest.detailed(true); // same request only with detailed description + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { + assertEquals(1, entry.getValue().size()); + assertEquals("NodeRequest[Test Request, true]", entry.getValue().get(0).getDescription()); + } + + // Release all tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + + // Make sure that we don't have any lingering tasks + for (TestNode node : testNodes) { + assertEquals(0, node.transportService.getTaskManager().getTasks().size()); + } + } + + public void testFindChildTasks() throws Exception { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + ActionFuture future = startBlockingTestNodesAction(checkLatch); + + TestNode testNode = testNodes[randomIntBetween(0, testNodes.length 
- 1)]; + + // Get the parent task + ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction"); + ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(1, response.getTasks().size()); + String parentNode = response.getTasks().get(0).getNode().getId(); + long parentTaskId = response.getTasks().get(0).getId(); + + // Find tasks with common parent + listTasksRequest = new ListTasksRequest(); + listTasksRequest.parentNode(parentNode); + listTasksRequest.parentTaskId(parentTaskId); + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getTasks().size()); + for (TaskInfo task : response.getTasks()) { + assertEquals("testAction[n]", task.getAction()); + assertEquals(parentNode, task.getParentNode()); + assertEquals(parentTaskId, task.getParentId()); + } + + // Release all tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + } + + public void testTaskManagementOptOut() throws Exception { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + // Starting actions that disable task manager + ActionFuture future = startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request", false)); + + TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)]; + + // Get the parent task + ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction*"); + ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(0, response.getTasks().size()); + + // Release all tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + } + + public void testTasksDescriptions() throws Exception { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + ActionFuture future = startBlockingTestNodesAction(checkLatch); + + // Check task counts using transport with filtering + TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)]; + ListTasksRequest listTasksRequest = new ListTasksRequest(); + listTasksRequest.actions("testAction[n]"); // only pick node actions + ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { + assertEquals(1, entry.getValue().size()); + assertNull(entry.getValue().get(0).getDescription()); + } + + // Check task counts using transport with detailed description + listTasksRequest.detailed(true); // same request only with detailed description + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(testNodes.length, response.getPerNodeTasks().size()); + for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { + assertEquals(1, entry.getValue().size()); + assertEquals("NodeRequest[Test Request, true]", entry.getValue().get(0).getDescription()); + } + + // Release all tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + } + + public void testFailedTasksCount() throws ExecutionException, InterruptedException, IOException { + connectNodes(testNodes); + TestNodesAction[] actions = new 
TestNodesAction[nodesCount]; + for (int i = 0; i < testNodes.length; i++) { + final int node = i; + actions[i] = new TestNodesAction(Settings.EMPTY, "testAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { + @Override + protected NodeResponse nodeOperation(NodeRequest request) { + logger.info("Action on node " + node); + throw new RuntimeException("Test exception"); + } + }; + } + + for (TestNode testNode : testNodes) { + assertEquals(0, testNode.transportService.getTaskManager().getTasks().size()); + } + NodesRequest request = new NodesRequest("Test Request"); + NodesResponse responses = actions[0].execute(request).get(); + assertEquals(nodesCount, responses.failureCount()); + } + + public void testTaskLevelActionFailures() throws ExecutionException, InterruptedException, IOException { + connectNodes(testNodes); + CountDownLatch checkLatch = new CountDownLatch(1); + ActionFuture future = startBlockingTestNodesAction(checkLatch); + + TestTasksAction[] tasksActions = new TestTasksAction[nodesCount]; + final int failTaskOnNode = randomIntBetween(1, nodesCount - 1); + for (int i = 0; i < testNodes.length; i++) { + final int node = i; + // Simulate task action that fails on one of the tasks on one of the nodes + tasksActions[i] = new TestTasksAction(Settings.EMPTY, "testTasksAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { + @Override + protected TestTaskResponse taskOperation(TestTasksRequest request, Task task) { + logger.info("Task action on node " + node); + if (failTaskOnNode == node && ((ChildTask) task).getParentNode() != null) { + logger.info("Failing on node " + node); + throw new RuntimeException("Task level failure"); + } + return new TestTaskResponse("Success on node " + node); + } + }; + } + + // Run task action on node tasks that are currently running + // should be successful on all nodes except one + TestTasksRequest testTasksRequest = new TestTasksRequest(); + testTasksRequest.actions("testAction[n]"); // pick all test actions + TestTasksResponse response = tasksActions[0].execute(testTasksRequest).get(); + // Get successful responses from all nodes except one + assertEquals(testNodes.length - 1, response.tasks.size()); + assertEquals(1, response.getTaskFailures().size()); // one task failed + assertThat(response.getTaskFailures().get(0).getReason(), containsString("Task level failure")); + assertEquals(0, response.getNodeFailures().size()); // no nodes failed + + // Release all node tasks and wait for response + checkLatch.countDown(); + NodesResponse responses = future.get(); + assertEquals(0, responses.failureCount()); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java new file mode 100644 index 00000000000..bd1377b89fe --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java @@ -0,0 +1,126 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.admin.cluster.settings; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.atomic.AtomicReference; + +public class SettingsUpdaterTests extends ESTestCase { + + + public void testUpdateSetting() { + AtomicReference index = new AtomicReference<>(); + AtomicReference shard = new AtomicReference<>(); + ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); + ClusterSettings settingsService = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, shard::set); + SettingsUpdater updater = new SettingsUpdater(settingsService); + MetaData.Builder metaData = MetaData.builder() + .persistentSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 2.5).build()) + .transientSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 3.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 4.5).build()); + ClusterState build = builder.metaData(metaData).build(); + ClusterState clusterState = updater.updateSettings(build, Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.5).build(), + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.4).build()); + assertNotSame(clusterState, build); + assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 0.4, 0.1); + assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 2.5, 0.1); + assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().transientSettings()), 0.5, 0.1); + assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().transientSettings()), 4.5, 0.1); + + clusterState = updater.updateSettings(clusterState, Settings.builder().putNull("cluster.routing.*").build(), + Settings.EMPTY); + assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 0.4, 0.1); + assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 2.5, 0.1); + assertFalse(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); + 
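// the wildcard reset above ("cluster.routing.*") clears every transient setting under that prefix, which these assertions verify +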
assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); + + clusterState = updater.updateSettings(clusterState, + Settings.EMPTY, Settings.builder().putNull("cluster.routing.*").put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 10.0).build()); + + assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 10.0, 0.1); + assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().persistentSettings())); + assertFalse(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); + assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); + assertNull("updater only does a dryRun", index.get()); + assertNull("updater only does a dryRun", shard.get()); + } + + public void testAllOrNothing() { + ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); + ClusterSettings settingsService = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + AtomicReference index = new AtomicReference<>(); + AtomicReference shard = new AtomicReference<>(); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, shard::set); + SettingsUpdater updater = new SettingsUpdater(settingsService); + MetaData.Builder metaData = MetaData.builder() + .persistentSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 2.5).build()) + .transientSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 3.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 4.5).build()); + ClusterState build = builder.metaData(metaData).build(); + + try { + updater.updateSettings(build, Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").build(), + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + fail("all or nothing"); + } catch (IllegalArgumentException ex) { + logger.info("", ex); + assertEquals("Failed to parse value [not a float] for setting [cluster.routing.allocation.balance.index]", ex.getMessage()); + } + assertNull("updater only does a dryRun", index.get()); + assertNull("updater only does a dryRun", shard.get()); + } + + public void testClusterBlock() { + ClusterState.Builder builder = ClusterState.builder(new ClusterName("foo")); + ClusterSettings settingsService = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + AtomicReference index = new AtomicReference<>(); + AtomicReference shard = new AtomicReference<>(); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, index::set); + settingsService.addSettingsUpdateConsumer(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, shard::set); + SettingsUpdater updater = new SettingsUpdater(settingsService); + MetaData.Builder metaData = MetaData.builder() + .persistentSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.5) + 
.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 2.5).build()) + .transientSettings(Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 3.5) + .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 4.5).build()); + ClusterState build = builder.metaData(metaData).build(); + + ClusterState clusterState = updater.updateSettings(build, Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), true).build(), + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + assertEquals(clusterState.blocks().global().size(), 1); + assertEquals(clusterState.blocks().global().iterator().next(), MetaData.CLUSTER_READ_ONLY_BLOCK); + + clusterState = updater.updateSettings(build, Settings.EMPTY, + Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build()); + assertEquals(clusterState.blocks().global().size(), 0); + + } +} diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index 55b0ba86aca..a64f67a8cd5 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -21,8 +21,8 @@ package org.elasticsearch.action.admin.cluster.stats; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.client.Requests; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index bb154218215..3ce9e99f4dc 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -285,4 +285,11 @@ public class CreateIndexIT extends ESIntegTestCase { assertThat(messages.toString(), containsString("mapper [text] is used by multiple types")); } } + + public void testRestartIndexCreationAfterFullClusterRestart() throws Exception { + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put("cluster.routing.allocation.enable", "none")).get(); + client().admin().indices().prepareCreate("test").setSettings(indexSettings()).get(); + internalCluster().fullRestart(); + ensureGreen("test"); + } } diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java similarity index 52% rename from core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java rename to core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java index 19cce93c6e4..8439e98f0ee 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java @@ -17,16 +17,19 @@ * under the License. 
*/ -package org.elasticsearch.indices.flush; +package org.elasticsearch.action.admin.indices.flush; import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.ObjectIntMap; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse.ShardCounts; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.flush.IndicesSyncedFlushResult.ShardCounts; -import org.elasticsearch.indices.flush.SyncedFlushService.SyncedFlushResponse; +import org.elasticsearch.indices.flush.ShardsSyncedFlushResult; +import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; @@ -42,14 +45,11 @@ import static org.hamcrest.Matchers.hasSize; public class SyncedFlushUnitTests extends ESTestCase { - private static class TestPlan { - public ShardCounts totalCounts; - public Map countsPerIndex = new HashMap<>(); + public SyncedFlushResponse.ShardCounts totalCounts; + public Map countsPerIndex = new HashMap<>(); public ObjectIntMap expectedFailuresPerIndex = new ObjectIntHashMap<>(); - - public IndicesSyncedFlushResult result; - + public SyncedFlushResponse result; } public void testIndicesSyncedFlushResult() throws IOException { @@ -76,6 +76,56 @@ public class SyncedFlushUnitTests extends ESTestCase { } } + public void testResponseStreaming() throws IOException { + final TestPlan testPlan = createTestPlan(); + assertThat(testPlan.result.totalShards(), equalTo(testPlan.totalCounts.total)); + assertThat(testPlan.result.successfulShards(), equalTo(testPlan.totalCounts.successful)); + assertThat(testPlan.result.failedShards(), equalTo(testPlan.totalCounts.failed)); + assertThat(testPlan.result.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? RestStatus.CONFLICT : RestStatus.OK)); + BytesStreamOutput out = new BytesStreamOutput(); + testPlan.result.writeTo(out); + StreamInput in = StreamInput.wrap(out.bytes()); + SyncedFlushResponse readResponse = new SyncedFlushResponse(); + readResponse.readFrom(in); + assertThat(readResponse.totalShards(), equalTo(testPlan.totalCounts.total)); + assertThat(readResponse.successfulShards(), equalTo(testPlan.totalCounts.successful)); + assertThat(readResponse.failedShards(), equalTo(testPlan.totalCounts.failed)); + assertThat(readResponse.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? 
RestStatus.CONFLICT : RestStatus.OK)); + assertThat(readResponse.shardsResultPerIndex.size(), equalTo(testPlan.result.getShardsResultPerIndex().size())); + for (Map.Entry> entry : readResponse.getShardsResultPerIndex().entrySet()) { + List originalShardsResults = testPlan.result.getShardsResultPerIndex().get(entry.getKey()); + assertNotNull(originalShardsResults); + List readShardsResults = entry.getValue(); + assertThat(readShardsResults.size(), equalTo(originalShardsResults.size())); + for (int i = 0; i < readShardsResults.size(); i++) { + ShardsSyncedFlushResult originalShardResult = originalShardsResults.get(i); + ShardsSyncedFlushResult readShardResult = readShardsResults.get(i); + assertThat(originalShardResult.failureReason(), equalTo(readShardResult.failureReason())); + assertThat(originalShardResult.failed(), equalTo(readShardResult.failed())); + assertThat(originalShardResult.getShardId(), equalTo(readShardResult.getShardId())); + assertThat(originalShardResult.successfulShards(), equalTo(readShardResult.successfulShards())); + assertThat(originalShardResult.syncId(), equalTo(readShardResult.syncId())); + assertThat(originalShardResult.totalShards(), equalTo(readShardResult.totalShards())); + assertThat(originalShardResult.failedShards().size(), equalTo(readShardResult.failedShards().size())); + for (Map.Entry shardEntry : originalShardResult.failedShards().entrySet()) { + SyncedFlushService.ShardSyncedFlushResponse readShardResponse = readShardResult.failedShards().get(shardEntry.getKey()); + assertNotNull(readShardResponse); + SyncedFlushService.ShardSyncedFlushResponse originalShardResponse = shardEntry.getValue(); + assertThat(originalShardResponse.failureReason(), equalTo(readShardResponse.failureReason())); + assertThat(originalShardResponse.success(), equalTo(readShardResponse.success())); + } + assertThat(originalShardResult.shardResponses().size(), equalTo(readShardResult.shardResponses().size())); + for (Map.Entry shardEntry : originalShardResult.shardResponses().entrySet()) { + SyncedFlushService.ShardSyncedFlushResponse readShardResponse = readShardResult.shardResponses().get(shardEntry.getKey()); + assertNotNull(readShardResponse); + SyncedFlushService.ShardSyncedFlushResponse originalShardResponse = shardEntry.getValue(); + assertThat(originalShardResponse.failureReason(), equalTo(readShardResponse.failureReason())); + assertThat(originalShardResponse.success(), equalTo(readShardResponse.success())); + } + } + } + } + private void assertShardCount(String name, Map header, ShardCounts expectedCounts) { assertThat(name + " has unexpected total count", (Integer) header.get("total"), equalTo(expectedCounts.total)); assertThat(name + " has unexpected successful count", (Integer) header.get("successful"), equalTo(expectedCounts.successful)); @@ -105,32 +155,33 @@ public class SyncedFlushUnitTests extends ESTestCase { failures++; shardsResults.add(new ShardsSyncedFlushResult(shardId, replicas + 1, "simulated total failure")); } else { - Map shardResponses = new HashMap<>(); + Map shardResponses = new HashMap<>(); for (int copy = 0; copy < replicas + 1; copy++) { final ShardRouting shardRouting = TestShardRouting.newShardRouting(index, shard, "node_" + shardId + "_" + copy, null, - copy == 0, ShardRoutingState.STARTED, 0); + copy == 0, ShardRoutingState.STARTED, 0); if (randomInt(5) < 2) { // shard copy failure failed++; failures++; - shardResponses.put(shardRouting, new SyncedFlushResponse("copy failure " + shardId)); + shardResponses.put(shardRouting, new 
SyncedFlushService.ShardSyncedFlushResponse("copy failure " + shardId)); } else { successful++; - shardResponses.put(shardRouting, new SyncedFlushResponse()); + shardResponses.put(shardRouting, new SyncedFlushService.ShardSyncedFlushResponse()); } } shardsResults.add(new ShardsSyncedFlushResult(shardId, "_sync_id_" + shard, replicas + 1, shardResponses)); } } indicesResults.put(index, shardsResults); - testPlan.countsPerIndex.put(index, new ShardCounts(shards * (replicas + 1), successful, failed)); + testPlan.countsPerIndex.put(index, new SyncedFlushResponse.ShardCounts(shards * (replicas + 1), successful, failed)); testPlan.expectedFailuresPerIndex.put(index, failures); totalFailed += failed; totalShards += shards * (replicas + 1); totalSuccesful += successful; } - testPlan.result = new IndicesSyncedFlushResult(indicesResults); - testPlan.totalCounts = new ShardCounts(totalShards, totalSuccesful, totalFailed); + testPlan.result = new SyncedFlushResponse(indicesResults); + testPlan.totalCounts = new SyncedFlushResponse.ShardCounts(totalShards, totalSuccesful, totalFailed); return testPlan; } + } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index ffb9e630b70..3a81f0ba0d9 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.indices.shards; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.client.Requests; @@ -87,6 +86,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { for (ObjectCursor> shardStoreStatuses : shardStores.values()) { for (IndicesShardStoresResponse.StoreStatus storeStatus : shardStoreStatuses.value) { assertThat(storeStatus.getVersion(), greaterThan(-1l)); + assertThat(storeStatus.getAllocationId(), notNullValue()); assertThat(storeStatus.getNode(), notNullValue()); assertThat(storeStatus.getStoreException(), nullValue()); } @@ -108,7 +108,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { assertThat(shardStoresStatuses.size(), equalTo(unassignedShards.size())); for (IntObjectCursor> storesStatus : shardStoresStatuses) { assertThat("must report for one store", storesStatus.value.size(), equalTo(1)); - assertThat("reported store should be primary", storesStatus.value.get(0).getAllocation(), equalTo(IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY)); + assertThat("reported store should be primary", storesStatus.value.get(0).getAllocationStatus(), equalTo(IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY)); } logger.info("--> enable allocation"); enableAllocation(index); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java index cf197a27faf..70fd11e5de8 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java @@ -22,16 +22,25 @@ 
package org.elasticsearch.action.admin.indices.shards; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.transport.DummyTransportAddress; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.NodeDisconnectedException; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -44,9 +53,9 @@ public class IndicesShardStoreResponseTests extends ESTestCase { DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Version.CURRENT); DiscoveryNode node2 = new DiscoveryNode("node2", DummyTransportAddress.INSTANCE, Version.CURRENT); List storeStatusList = new ArrayList<>(); - storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY, null)); - storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node2, 2, IndicesShardStoresResponse.StoreStatus.Allocation.REPLICA, null)); - storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, IndicesShardStoresResponse.StoreStatus.Allocation.UNUSED, new IOException("corrupted"))); + storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, null, IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null)); + storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node2, 2, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null)); + storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED, new IOException("corrupted"))); storeStatuses.put(0, storeStatusList); storeStatuses.put(1, storeStatusList); ImmutableOpenIntMap> storesMap = storeStatuses.build(); @@ -89,8 +98,10 @@ public class IndicesShardStoreResponseTests extends ESTestCase { IndicesShardStoresResponse.StoreStatus storeStatus = storeStatusList.get(i); assertThat(storeInfo.containsKey("version"), equalTo(true)); assertThat(((int) storeInfo.get("version")), equalTo(((int) storeStatus.getVersion()))); + assertThat(storeInfo.containsKey("allocation_id"), equalTo(true)); + assertThat(((String) storeInfo.get("allocation_id")), equalTo((storeStatus.getAllocationId()))); assertThat(storeInfo.containsKey("allocation"), equalTo(true)); - assertThat(((String) storeInfo.get("allocation")), equalTo(storeStatus.getAllocation().value())); + assertThat(((String) storeInfo.get("allocation")), equalTo(storeStatus.getAllocationStatus().value())); assertThat(storeInfo.containsKey(storeStatus.getNode().id()), equalTo(true)); if (storeStatus.getStoreException() != null) { assertThat(storeInfo.containsKey("store_exception"), equalTo(true)); @@ -104,11 +115,11 @@ public class 
IndicesShardStoreResponseTests extends ESTestCase { public void testStoreStatusOrdering() throws Exception { DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Version.CURRENT); List orderedStoreStatuses = new ArrayList<>(); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 2, IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY, null)); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, IndicesShardStoresResponse.StoreStatus.Allocation.PRIMARY, null)); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, IndicesShardStoresResponse.StoreStatus.Allocation.REPLICA, null)); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, IndicesShardStoresResponse.StoreStatus.Allocation.UNUSED, null)); - orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, IndicesShardStoresResponse.StoreStatus.Allocation.REPLICA, new IOException("corrupted"))); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 2, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null)); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null)); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null)); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 1, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.UNUSED, null)); + orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, Strings.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, new IOException("corrupted"))); List storeStatuses = new ArrayList<>(orderedStoreStatuses); Collections.shuffle(storeStatuses, random()); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 8539c6cb8ec..4d7e9aa216f 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -27,10 +27,12 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.notNullValue; public class IndicesStatsTests extends ESSingleNodeTestCase { - + public void testSegmentStatsEmptyIndex() { createIndex("test"); IndicesStatsResponse rsp = client().admin().indices().prepareStats("test").get(); @@ -41,7 +43,7 @@ public class IndicesStatsTests extends ESSingleNodeTestCase { assertEquals(0, stats.getNormsMemoryInBytes()); assertEquals(0, stats.getDocValuesMemoryInBytes()); } - + public void testSegmentStats() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java 
b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index c642bdb1e79..09079be6ee9 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -30,7 +30,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java index 143300ecc07..9d8002210e7 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java @@ -42,7 +42,6 @@ import org.junit.BeforeClass; import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.function.Predicate; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -62,10 +61,15 @@ public class UpgradeIT extends ESBackcompatTestCase { return 2; } + @Override + protected int maximumNumberOfReplicas() { + return Math.max(0, Math.min(backwardsCluster().numBackwardsDataNodes(), backwardsCluster().numNewDataNodes()) - 1); + } + public void testUpgrade() throws Exception { // allow the cluster to rebalance quickly - two concurrent rebalances are the default, we can go higher Settings.Builder builder = Settings.builder(); - builder.put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 100); + builder.put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 100); client().admin().cluster().prepareUpdateSettings().setPersistentSettings(builder).get(); int numIndexes = randomIntBetween(2, 4); @@ -73,7 +77,7 @@ public class UpgradeIT extends ESBackcompatTestCase { for (int i = 0; i < numIndexes; ++i) { final String indexName = "test" + i; indexNames[i] = indexName; - + Settings settings = Settings.builder() .put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern()) // don't allow any merges so that we can check segments are upgraded @@ -101,7 +105,7 @@ public class UpgradeIT extends ESBackcompatTestCase { } else { assertEquals(0, flush(indexName).getFailedShards()); } - + // index more docs that won't be flushed numDocs = scaledRandomIntBetween(100, 1000); docs = new ArrayList<>(); @@ -117,25 +121,25 @@ ensureGreen(); // disable allocation entirely until all nodes are upgraded builder = Settings.builder(); - builder.put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE); + builder.put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE); client().admin().cluster().prepareUpdateSettings().setTransientSettings(builder).get(); 
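// note: while allocation is disabled, the node upgrade below can proceed without shards relocating onto nodes that still hold old-format segments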
backwardsCluster().upgradeAllNodes(); builder = Settings.builder(); // disable rebalancing entirely for the time being, otherwise we might get relocations / rebalance from nodes with old segments - builder.put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE); - builder.put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.ALL); + builder.put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE); + builder.put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.ALL); client().admin().cluster().prepareUpdateSettings().setTransientSettings(builder).get(); ensureGreen(); logger.info("--> Nodes upgrade complete"); logSegmentsState(); - + assertNotUpgraded(client()); final String indexToUpgrade = "test" + randomInt(numIndexes - 1); // This test fires up another node running an older version of ES, but because wire protocol changes across major ES versions, it // means we can never generate ancient segments in this test (unless Lucene major version bumps but ES major version does not): assertFalse(hasAncientSegments(client(), indexToUpgrade)); - + logger.info("--> Running upgrade on index " + indexToUpgrade); assertNoFailures(client().admin().indices().prepareUpgrade(indexToUpgrade).get()); awaitBusy(() -> { @@ -204,7 +208,7 @@ public class UpgradeIT extends ESBackcompatTestCase { assertEquals("index " + status.getIndex() + " should be upgraded", 0, status.getToUpgradeBytes()); } - + // double check using the segments api that all segments are actually upgraded IndicesSegmentResponse segsRsp; if (index == null) { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index 237f3a2e821..04b58e6b9fc 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.bulk; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.get.MultiGetItemResponse; import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.get.MultiGetResponse; diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java new file mode 100644 index 00000000000..503daba8c2a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -0,0 +1,238 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.ESIntegTestCase; +import org.hamcrest.Matcher; + +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 2) +public class BulkProcessorRetryIT extends ESIntegTestCase { + private static final String INDEX_NAME = "test"; + private static final String TYPE_NAME = "type"; + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + // Have very low pool and queue sizes to overwhelm internal pools easily + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put("threadpool.generic.size", 1) + .put("threadpool.generic.queue_size", 1) + // don't mess with this one! It's quite sensitive to a low queue size + // (see also ThreadedActionListener which is happily spawning threads even when we already got rejected) + //.put("threadpool.listener.queue_size", 1) + .put("threadpool.get.queue_size", 1) + // default is 50 + .put("threadpool.bulk.queue_size", 30) + .build(); + } + + + public void testBulkRejectionLoadWithoutBackoff() throws Throwable { + boolean rejectedExecutionExpected = true; + executeBulkRejectionLoad(BackoffPolicy.noBackoff(), rejectedExecutionExpected); + } + + public void testBulkRejectionLoadWithBackoff() throws Throwable { + boolean rejectedExecutionExpected = false; + executeBulkRejectionLoad(BackoffPolicy.exponentialBackoff(), rejectedExecutionExpected); + } + + private void executeBulkRejectionLoad(BackoffPolicy backoffPolicy, boolean rejectedExecutionExpected) throws Throwable { + final CorrelatingBackoffPolicy internalPolicy = new CorrelatingBackoffPolicy(backoffPolicy); + int numberOfAsyncOps = randomIntBetween(600, 700); + final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps); + final Set<Object> responses = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + assertAcked(prepareCreate(INDEX_NAME)); + ensureGreen(); + + BulkProcessor bulkProcessor = BulkProcessor.builder(client(), new BulkProcessor.Listener() { + @Override + public void beforeBulk(long executionId, BulkRequest request) { + // no op + } + + @Override + public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { + internalPolicy.logResponse(response); + responses.add(response); + latch.countDown(); + } + + @Override + public void afterBulk(long executionId, BulkRequest request, Throwable failure) { + responses.add(failure); + latch.countDown(); + } + }).setBulkActions(1) + // zero means that we're in the sync case, more means that we're in the async case + .setConcurrentRequests(randomIntBetween(0, 100)) + .setBackoffPolicy(internalPolicy) + .build(); + indexDocs(bulkProcessor, numberOfAsyncOps); + latch.await(10, TimeUnit.SECONDS); 
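+ // note: a timed-out await() just returns false here; close() below still flushes whatever is left in the processor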
+ bulkProcessor.close(); + + assertThat(responses.size(), equalTo(numberOfAsyncOps)); + + // validate all responses + for (Object response : responses) { + if (response instanceof BulkResponse) { + BulkResponse bulkResponse = (BulkResponse) response; + for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { + if (bulkItemResponse.isFailed()) { + BulkItemResponse.Failure failure = bulkItemResponse.getFailure(); + Throwable rootCause = ExceptionsHelper.unwrapCause(failure.getCause()); + if (rootCause instanceof EsRejectedExecutionException) { + if (rejectedExecutionExpected == false) { + Iterator<TimeValue> backoffState = internalPolicy.backoffStateFor(bulkResponse); + assertNotNull("backoffState is null (indicates a bulk request got rejected without retry)", backoffState); + if (backoffState.hasNext()) { + // the backoff policy would still have allowed more retries, so a rejection here is unexpected + throw new AssertionError("Got rejected although backoff policy would allow more retries", rootCause); + } else { + logger.debug("We maxed out the number of bulk retries and got rejected (this is ok)."); + } + } + } else { + throw new AssertionError("Unexpected failure", rootCause); + } + } + } + } else { + Throwable t = (Throwable) response; + // we're not expecting any other errors + throw new AssertionError("Unexpected failure", t); + } + } + + client().admin().indices().refresh(new RefreshRequest()).get(); + + // validate we did not create any duplicates due to retries + Matcher<Long> searchResultCount; + // it is ok if we lost some index operations to rejected executions (which is possible even when backing off, although less likely) + searchResultCount = lessThanOrEqualTo((long) numberOfAsyncOps); + + SearchResponse results = client() + .prepareSearch(INDEX_NAME) + .setTypes(TYPE_NAME) + .setQuery(QueryBuilders.matchAllQuery()) + .setSize(0) + .get(); + assertThat(results.getHits().totalHits(), searchResultCount); + } + + private static void indexDocs(BulkProcessor processor, int numDocs) { + for (int i = 1; i <= numDocs; i++) { + processor.add(client() + .prepareIndex() + .setIndex(INDEX_NAME) + .setType(TYPE_NAME) + .setId(Integer.toString(i)) + .setSource("field", randomRealisticUnicodeOfLengthBetween(1, 30)) + .request()); + } + } + + /** + * Internal helper class to correlate backoff states with bulk responses. This is needed to check whether we maxed out the number + * of retries but still got rejected (which is perfectly fine and can also happen from time to time under heavy load). + * + * This implementation relies on an implementation detail in Retry, namely that the bulk listener is notified on the same thread + * as the last call to the backoff policy's iterator. The advantage is that this is non-invasive to the rest of the production code. + */ + private static class CorrelatingBackoffPolicy extends BackoffPolicy { + private final Map<BulkResponse, Iterator<TimeValue>> correlations = new ConcurrentHashMap<>(); + // this is intentionally *not* static final. We will only ever have one instance of this class per test case and want the + // thread local to be eligible for garbage collection right after the test to avoid leaks. 
+ private final ThreadLocal<Iterator<TimeValue>> iterators = new ThreadLocal<>(); + + private final BackoffPolicy delegate; + + private CorrelatingBackoffPolicy(BackoffPolicy delegate) { + this.delegate = delegate; + } + + public Iterator<TimeValue> backoffStateFor(BulkResponse response) { + return correlations.get(response); + } + + // Assumption: This method is called from the same thread as the last call to the internal iterator's #hasNext() / #next() + // see also Retry.AbstractRetryHandler#onResponse(). + public void logResponse(BulkResponse response) { + Iterator<TimeValue> iterator = iterators.get(); + // did we ever retry? + if (iterator != null) { + // we should correlate any iterator only once + iterators.remove(); + correlations.put(response, iterator); + } + } + + @Override + public Iterator<TimeValue> iterator() { + return new CorrelatingIterator(iterators, delegate.iterator()); + } + + private static class CorrelatingIterator implements Iterator<TimeValue> { + private final Iterator<TimeValue> delegate; + private final ThreadLocal<Iterator<TimeValue>> iterators; + + private CorrelatingIterator(ThreadLocal<Iterator<TimeValue>> iterators, Iterator<TimeValue> delegate) { + this.iterators = iterators; + this.delegate = delegate; + } + + @Override + public boolean hasNext() { + // update on every invocation as we might get rescheduled on a different thread. Unfortunately, there is a chance that + // we pollute the thread local map with stale values. Due to the implementation of Retry and the life cycle of the + // enclosing class CorrelatingBackoffPolicy this should not pose a major problem though. + iterators.set(this); + return delegate.hasNext(); + } + + @Override + public TimeValue next() { + // update on every invocation + iterators.set(this); + return delegate.next(); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 78f96bab7b2..81eb832be9a 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -37,7 +37,12 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; public class BulkRequestTests extends ESTestCase { public void testSimpleBulk1() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java new file mode 100644 index 00000000000..ebb3b5211f1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -0,0 +1,222 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.rest.NoOpClient; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class RetryTests extends ESTestCase { + // no need to wait for a long time in tests + private static final TimeValue DELAY = TimeValue.timeValueMillis(1L); + private static final int CALLS_TO_FAIL = 5; + + private MockBulkClient bulkClient; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + this.bulkClient = new MockBulkClient(getTestName(), CALLS_TO_FAIL); + } + + @Override + @After + public void tearDown() throws Exception { + super.tearDown(); + this.bulkClient.close(); + } + + private BulkRequest createBulkRequest() { + BulkRequest request = new BulkRequest(); + request.add(new UpdateRequest("shop", "products", "1")); + request.add(new UpdateRequest("shop", "products", "2")); + request.add(new UpdateRequest("shop", "products", "3")); + request.add(new UpdateRequest("shop", "products", "4")); + request.add(new UpdateRequest("shop", "products", "5")); + return request; + } + + public void testSyncRetryBacksOff() throws Exception { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL); + + BulkRequest bulkRequest = createBulkRequest(); + BulkResponse response = Retry + .on(EsRejectedExecutionException.class) + .policy(backoff) + .withSyncBackoff(bulkClient, bulkRequest); + + assertFalse(response.hasFailures()); + assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions())); + } + + public void testSyncRetryFailsAfterBackoff() throws Exception { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1); + + BulkRequest bulkRequest = createBulkRequest(); + BulkResponse response = Retry + .on(EsRejectedExecutionException.class) + .policy(backoff) + .withSyncBackoff(bulkClient, bulkRequest); + + assertTrue(response.hasFailures()); + assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions())); + } + + public void testAsyncRetryBacksOff() throws Exception { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL); + AssertingListener listener = new AssertingListener(); + + BulkRequest bulkRequest = createBulkRequest(); + Retry.on(EsRejectedExecutionException.class) + .policy(backoff) + .withAsyncBackoff(bulkClient, bulkRequest, listener); + + listener.awaitCallbacksCalled(); + listener.assertOnResponseCalled(); + 
listener.assertResponseWithoutFailures(); + listener.assertResponseWithNumberOfItems(bulkRequest.numberOfActions()); + listener.assertOnFailureNeverCalled(); + } + + public void testAsyncRetryFailsAfterBacksOff() throws Exception { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1); + AssertingListener listener = new AssertingListener(); + + BulkRequest bulkRequest = createBulkRequest(); + Retry.on(EsRejectedExecutionException.class) + .policy(backoff) + .withAsyncBackoff(bulkClient, bulkRequest, listener); + + listener.awaitCallbacksCalled(); + + listener.assertOnResponseCalled(); + listener.assertResponseWithFailures(); + listener.assertResponseWithNumberOfItems(bulkRequest.numberOfActions()); + listener.assertOnFailureNeverCalled(); + } + + private static class AssertingListener implements ActionListener<BulkResponse> { + private final CountDownLatch latch; + private final AtomicInteger countOnResponseCalled = new AtomicInteger(); + private volatile Throwable lastFailure; + private volatile BulkResponse response; + + private AssertingListener() { + latch = new CountDownLatch(1); + } + + public void awaitCallbacksCalled() throws InterruptedException { + latch.await(); + } + + @Override + public void onResponse(BulkResponse bulkItemResponses) { + this.response = bulkItemResponses; + countOnResponseCalled.incrementAndGet(); + latch.countDown(); + } + + @Override + public void onFailure(Throwable e) { + this.lastFailure = e; + latch.countDown(); + } + + public void assertOnResponseCalled() { + assertThat(countOnResponseCalled.get(), equalTo(1)); + } + + public void assertResponseWithNumberOfItems(int numItems) { + assertThat(response.getItems().length, equalTo(numItems)); + } + + public void assertResponseWithoutFailures() { + assertThat(response, notNullValue()); + assertFalse("Response should not have failures", response.hasFailures()); + } + + public void assertResponseWithFailures() { + assertThat(response, notNullValue()); + assertTrue("Response should have failures", response.hasFailures()); + } + + public void assertOnFailureNeverCalled() { + assertThat(lastFailure, nullValue()); + } + } + + private static class MockBulkClient extends NoOpClient { + private int numberOfCallsToFail; + + private MockBulkClient(String testName, int numberOfCallsToFail) { + super(testName); + this.numberOfCallsToFail = numberOfCallsToFail; + } + + @Override + public ActionFuture<BulkResponse> bulk(BulkRequest request) { + PlainActionFuture<BulkResponse> responseFuture = new PlainActionFuture<>(); + bulk(request, responseFuture); + return responseFuture; + } + + @Override + public void bulk(BulkRequest request, ActionListener<BulkResponse> listener) { + // do everything synchronously, that's fine for a test + boolean shouldFail = numberOfCallsToFail > 0; + numberOfCallsToFail--; + + BulkItemResponse[] itemResponses = new BulkItemResponse[request.requests().size()]; + // if we have to fail, we need to fail at least once "reliably", the rest can be random + int itemToFail = randomInt(request.requests().size() - 1); + for (int idx = 0; idx < request.requests().size(); idx++) { + if (shouldFail && (randomBoolean() || idx == itemToFail)) { + itemResponses[idx] = failedResponse(); + } else { + itemResponses[idx] = successfulResponse(); + } + } + listener.onResponse(new BulkResponse(itemResponses, 1000L)); + } + + private BulkItemResponse successfulResponse() { + return new BulkItemResponse(1, "update", new DeleteResponse()); + } + + private BulkItemResponse failedResponse() { + return new BulkItemResponse(1, "update", new 
BulkItemResponse.Failure("test", "test", "1", new EsRejectedExecutionException("pool full"))); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java b/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java index 937cfb7b948..4fb94a4fb53 100644 --- a/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java @@ -23,7 +23,10 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; -import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.*; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GT; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE; import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX; import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN; import static org.hamcrest.Matchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java index 1d3a9e18757..ad246ebc530 100644 --- a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java @@ -27,7 +27,11 @@ import java.util.Arrays; import java.util.HashSet; import java.util.Set; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; /** */ diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index f21013b7fbe..6a14989be1a 100644 --- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -25,6 +25,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -67,7 +69,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAsciiOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); - TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null) { + TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); @@ -147,7 +149,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAsciiOfLength(randomInt(30)); ActionFilters 
actionFilters = new ActionFilters(filters); - TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null) { + TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); @@ -218,9 +220,9 @@ public class TransportActionFilterChainTests extends ESTestCase { RequestTestFilter testFilter = new RequestTestFilter(randomInt(), new RequestCallback() { @Override - public void execute(final String action, final ActionRequest actionRequest, final ActionListener actionListener, final ActionFilterChain actionFilterChain) { + public void execute(Task task, final String action, final ActionRequest actionRequest, final ActionListener actionListener, final ActionFilterChain actionFilterChain) { for (int i = 0; i <= additionalContinueCount; i++) { - actionFilterChain.proceed(action, actionRequest, actionListener); + actionFilterChain.proceed(task, action, actionRequest, actionListener); } } }); @@ -230,7 +232,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAsciiOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); - TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null) { + TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); @@ -286,7 +288,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAsciiOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); - TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null) { + TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); @@ -344,11 +346,11 @@ public class TransportActionFilterChainTests extends ESTestCase { @SuppressWarnings("unchecked") @Override - public void apply(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + public void apply(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { this.runs.incrementAndGet(); this.lastActionName = action; this.executionToken = counter.incrementAndGet(); - this.callback.execute(action, actionRequest, actionListener, actionFilterChain); + this.callback.execute(task, action, actionRequest, actionListener, actionFilterChain); } @Override @@ -375,8 +377,8 @@ public class TransportActionFilterChainTests extends ESTestCase { } @Override - public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { - chain.proceed(action, request, listener); + public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + chain.proceed(task, action, request, listener); } @Override @@ -391,20 +393,20 @@ public class TransportActionFilterChainTests extends ESTestCase { private static 
enum RequestOperation implements RequestCallback { CONTINUE_PROCESSING { @Override - public void execute(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { - actionFilterChain.proceed(action, actionRequest, actionListener); + public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + actionFilterChain.proceed(task, action, actionRequest, actionListener); } }, LISTENER_RESPONSE { @Override @SuppressWarnings("unchecked") - public void execute(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { actionListener.onResponse(new TestResponse()); } }, LISTENER_FAILURE { @Override - public void execute(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { actionListener.onFailure(new ElasticsearchTimeoutException("")); } } @@ -433,7 +435,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } private static interface RequestCallback { - void execute(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain); + void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain); } private static interface ResponseCallback { diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 6f5be649451..e4a1a9deed9 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -470,5 +470,16 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { @Override public void sendResponse(Throwable error) throws IOException { } + + @Override + public long getRequestId() { + return 0; + } + + @Override + public String getChannelType() { + return "test"; + } + } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 825e3e40894..f20e54050c6 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -1,8 +1,26 @@ package org.elasticsearch.action.support.master; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
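A note on the API change exercised above: the TransportActionFilterChainTests hunks all follow one pattern — `ActionFilter.apply` and `ActionFilterChain.proceed` gained a leading `Task` parameter, and `TransportAction` constructors now take a `TaskManager`. A minimal pass-through filter against the new request-side signature might look like the sketch below; the class name is hypothetical, and the response-side hook is assumed to keep its old shape since these hunks do not touch it.

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.support.ActionFilterChain;
import org.elasticsearch.tasks.Task;

// Hypothetical pass-through filter written against the new signatures.
public class PassThroughFilter implements ActionFilter {

    @Override
    public int order() {
        return 0; // filters run in ascending order of this value
    }

    @Override
    public void apply(Task task, String action, ActionRequest request,
                      ActionListener listener, ActionFilterChain chain) {
        // The task travels down the chain so the terminal TransportAction
        // can track the request through its TaskManager.
        chain.proceed(task, action, request, listener);
    }

    @Override
    public void apply(String action, ActionResponse response,
                      ActionListener listener, ActionFilterChain chain) {
        // The response side is untouched by this change set.
        chain.proceed(action, response, listener);
    }
}
```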
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.fd.FaultDetection; @@ -45,8 +63,8 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase { .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but not too long so the test won't time out - .put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2) + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) .build(); internalCluster().startMasterOnlyNodesAsync(3, sharedSettings).get(); diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index b66196ae7d5..980558c2716 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -39,10 +39,10 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; @@ -57,7 +57,6 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -121,9 +120,9 @@ public class TransportMasterNodeActionTests extends ESTestCase { } @Override - protected void doExecute(final Request request, ActionListener listener) { + protected void doExecute(Task task, final Request request, ActionListener listener) { // remove unneeded threading by wrapping listener with SAME to prevent super.doExecute from wrapping it with LISTENER - super.doExecute(request, new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener)); + super.doExecute(task, request, new
ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener)); } @Override @@ -161,7 +160,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { if (masterOperationFailure) { listener.onFailure(exception); } else { @@ -346,4 +345,4 @@ public class TransportMasterNodeActionTests extends ESTestCase { assertTrue(listener.isDone()); assertThat(listener.get(), equalTo(response)); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 4d17155f611..d1abe8653f0 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.action.support.replication; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushResponse; diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index 406e476b4e0..913d52d5b17 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -27,7 +27,12 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.index.shard.ShardId; @@ -35,7 +40,10 @@ import org.elasticsearch.index.shard.ShardId; import java.util.HashSet; import java.util.Set; -import static org.elasticsearch.cluster.metadata.IndexMetaData.*; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; import static 
org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomIntBetween; diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 5834b2662ad..fdcf4b07245 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -55,7 +55,10 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseOptions; +import org.elasticsearch.transport.TransportService; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -859,6 +862,16 @@ public class TransportReplicationActionTests extends ESTestCase { public void sendResponse(Throwable error) throws IOException { listener.onFailure(error); } + + @Override + public long getRequestId() { + return 0; + } + + @Override + public String getChannelType() { + return "replica_test"; + } }; } diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index ba6e6b6532d..d1217ea6f31 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -28,8 +28,19 @@ import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.analysis.util.CharArraySet; -import org.apache.lucene.document.*; -import org.apache.lucene.index.*; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.StringField; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; @@ -43,7 +54,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Locale; +import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java 
b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java index 0eb7c0757e7..60fa0e9d684 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java @@ -24,7 +24,6 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.BytesStream; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index 5507686e355..0c542698b5f 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.termvectors; import com.carrotsearch.hppc.ObjectIntHashMap; - import org.apache.lucene.analysis.payloads.PayloadHelper; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DirectoryReader; diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java b/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java index cab27df6936..ec608e0bf54 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java @@ -255,7 +255,7 @@ public class TermVectorsUnitTests extends ESTestCase { assertThat(request.positions(), equalTo(req2.positions())); assertThat(request.termStatistics(), equalTo(req2.termStatistics())); assertThat(request.preference(), equalTo(pref)); - assertThat(request.routing(), equalTo(parent)); + assertThat(request.routing(), equalTo(null)); } } diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 6cf7a3384ab..51053f63a01 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -34,7 +34,11 @@ import org.elasticsearch.test.ESTestCase; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class UpdateRequestTests extends ESTestCase { public void testUpdateRequest() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java index 07d59b820aa..fe025060a9d 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.bwcompat; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - 
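The TransportBroadcastByNodeActionTests and TransportReplicationActionTests hunks above add `getRequestId()` and `getChannelType()` to their `TransportChannel` stubs, which suggests the interface itself grew those two members. A self-contained test channel under the new contract might look roughly like this; the remaining members (`action()`, `getProfileName()`, the three `sendResponse` overloads) are assumed from the stubs in this diff, and the class name is illustrative.

```java
import java.io.IOException;

import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseOptions;

// Hypothetical channel that captures whatever is sent through it.
public class CapturingChannel implements TransportChannel {

    private TransportResponse response;
    private Throwable error;

    @Override
    public String action() {
        return "internal:test"; // illustrative action name
    }

    @Override
    public String getProfileName() {
        return "default";
    }

    @Override
    public void sendResponse(TransportResponse response) throws IOException {
        this.response = response;
    }

    @Override
    public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
        this.response = response;
    }

    @Override
    public void sendResponse(Throwable error) throws IOException {
        this.error = error;
    }

    // The two methods this change set adds to every channel implementation:
    @Override
    public long getRequestId() {
        return 0; // a unit test can return any constant
    }

    @Override
    public String getChannelType() {
        return "capturing"; // short tag identifying the transport flavor
    }
}
```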
import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.Version; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java index 95735b8648f..43633fe6f27 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java @@ -19,17 +19,14 @@ package org.elasticsearch.bwcompat; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.index.Fields; import org.apache.lucene.util.English; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.get.GetResponse; @@ -40,11 +37,13 @@ import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 97e77a00e38..7011b4092e4 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.action.admin.indices.upgrade.UpgradeIT; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; @@ -60,9 +61,19 @@ import org.junit.Before; import java.io.IOException; import java.io.InputStream; -import java.nio.file.*; +import java.nio.file.DirectoryStream; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import 
java.util.Locale; +import java.util.Map; +import java.util.SortedSet; +import java.util.TreeSet; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -107,7 +118,8 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { public Settings nodeSettings(int ord) { return Settings.builder() .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) // disable merging so no segments will be upgraded - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 30) // increase recovery speed for small files + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 30) // speed up recoveries + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 30) .build(); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 228f1a65121..6ad05b3ff84 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -181,7 +181,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { logger.info("--> check settings"); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); - assertThat(clusterState.metaData().persistentSettings().get(FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP + "version_attr"), equalTo(version)); + assertThat(clusterState.metaData().persistentSettings().get(FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "version_attr"), equalTo(version)); logger.info("--> check templates"); IndexTemplateMetaData template = clusterState.getMetaData().templates().get("template_" + version.toLowerCase(Locale.ROOT)); diff --git a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index e93fbc8e14a..e7ba8de0f97 100644 --- a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -29,6 +29,8 @@ import org.elasticsearch.client.AbstractClientHeadersTestCase; import org.elasticsearch.client.Client; import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; @@ -61,7 +63,7 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase { private static class InternalTransportAction extends TransportAction { private InternalTransportAction(Settings settings, String actionName, ThreadPool threadPool) { - super(settings, actionName, threadPool, EMPTY_FILTERS, null); + super(settings, actionName, threadPool, EMPTY_FILTERS, null, new TaskManager(settings)); } @Override diff --git a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java index 9f16ade87e8..d8f93a23140 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java +++ 
b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java @@ -20,7 +20,6 @@ package org.elasticsearch.client.transport; import com.carrotsearch.randomizedtesting.generators.RandomInts; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -28,7 +27,14 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportServiceAdapter; import java.io.IOException; import java.util.Collections; diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index f4b29768b91..f127ae28378 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -32,15 +32,16 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; @@ -114,12 +115,12 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase { public String description() { return "a mock transport service"; } - public void onModule(TransportModule transportModule) { - transportModule.addTransportService("internal", InternalTransportService.class); + public void onModule(NetworkModule transportModule) { + transportModule.registerTransportService("internal", InternalTransportService.class); } @Override public Settings additionalSettings() { - return Settings.builder().put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, "internal").build(); + return Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "internal").build(); } } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 
093e46186b3..72ace64d9ee 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BaseTransportResponseHandler; diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 5ed45620a03..31487614c99 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionModule; @@ -39,6 +38,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.store.Store; @@ -126,7 +126,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() // manual collection or upon cluster forming. 
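Many of the hunks that follow are one mechanical migration: bare string setting constants (`FOO`) become typed `Setting` objects (`FOO_SETTING`), and call sites that need the raw key go through `.getKey()`. A sketch of the pattern, reusing the `Setting.boolSetting(key, defaultValue, dynamic, scope)` factory shape that the ClusterModuleTests hunk below registers (the key name here is hypothetical):

```java
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class SettingMigrationSketch {

    // New style: the Setting carries its default, whether it may be
    // updated dynamically, and its scope, so validation is centralized.
    public static final Setting<Boolean> MY_FLAG_SETTING =
            Setting.boolSetting("my.module.flag", false, true, Setting.Scope.CLUSTER);

    public static Settings exampleNodeSettings() {
        // Settings builders still consume plain string keys, which is why
        // every migrated call site in this diff ends in .getKey().
        return Settings.builder()
                .put(MY_FLAG_SETTING.getKey(), true)
                .build();
    }
}
```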
- .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, "1s") + .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.getKey(), "1s") .build(); } @@ -137,9 +137,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { } public void testClusterInfoServiceCollectsInformation() throws Exception { - internalCluster().startNodesAsync(2, - Settings.builder().put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "200ms").build()) - .get(); + internalCluster().startNodesAsync(2).get(); assertAcked(prepareCreate("test").setSettings(settingsBuilder() .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0) .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE).build())); @@ -147,6 +145,8 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { InternalTestCluster internalTestCluster = internalCluster(); // Get the cluster info service on the master node final InternalClusterInfoService infoService = (InternalClusterInfoService) internalTestCluster.getInstance(ClusterInfoService.class, internalTestCluster.getMasterName()); + infoService.setUpdateFrequency(TimeValue.timeValueMillis(200)); + infoService.onMaster(); ClusterInfo info = infoService.refresh(); assertNotNull("info should not be null", info); ImmutableOpenMap leastUsages = info.getNodeLeastAvailableDiskUsages(); @@ -188,7 +188,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { public void testClusterInfoServiceInformationClearOnError() throws InterruptedException, ExecutionException { internalCluster().startNodesAsync(2, // manually control publishing - Settings.builder().put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "60m").build()) + Settings.builder().put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(), "60m").build()) .get(); prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).get(); ensureGreen("test"); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 4ca0fffbdfc..0e9f6cd9e04 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -31,11 +31,14 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllo import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.settings.IndexDynamicSettings; public class ClusterModuleTests extends ModuleTestCase { @@ -73,18 +76,20 @@ public class ClusterModuleTests extends ModuleTestCase { } public void testRegisterClusterDynamicSettingDuplicate() { - ClusterModule module = new ClusterModule(Settings.EMPTY); + final SettingsFilter settingsFilter = new 
SettingsFilter(Settings.EMPTY); + SettingsModule module = new SettingsModule(Settings.EMPTY, settingsFilter); try { - module.registerClusterDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY); + module.registerSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING); } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE + "] twice"); + assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice"); } } public void testRegisterClusterDynamicSetting() { - ClusterModule module = new ClusterModule(Settings.EMPTY); - module.registerClusterDynamicSetting("foo.bar", Validator.EMPTY); - assertInstanceBindingWithAnnotation(module, DynamicSettings.class, dynamicSettings -> dynamicSettings.hasDynamicSetting("foo.bar"), ClusterDynamicSettings.class); + final SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY); + SettingsModule module = new SettingsModule(Settings.EMPTY, settingsFilter); + module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER)); + assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar")); } public void testRegisterIndexDynamicSettingDuplicate() { diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index 9e842a38722..6e7e338d8b9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -43,17 +43,29 @@ import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; /** * @@ -731,6 +743,59 @@ public class ClusterServiceIT extends ESIntegTestCase { } } + /* + * test that a listener throwing an exception while handling a + * notification does not prevent publication notification to the + * executor + */ + public void testClusterStateTaskListenerThrowingExceptionIsOkay() throws InterruptedException { + Settings settings = settingsBuilder() + .put("discovery.type", "local") + .build(); + internalCluster().startNode(settings); + ClusterService clusterService = internalCluster().getInstance(ClusterService.class); + + final CountDownLatch latch = new CountDownLatch(1); + AtomicBoolean published = new 
AtomicBoolean(); + + clusterService.submitStateUpdateTask( + "testClusterStateTaskListenerThrowingExceptionIsOkay", + new Object(), + ClusterStateTaskConfig.build(Priority.NORMAL), + new ClusterStateTaskExecutor() { + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterState newClusterState = ClusterState.builder(currentState).build(); + return BatchResult.builder().successes(tasks).build(newClusterState); + } + + @Override + public void clusterStatePublished(ClusterState newClusterState) { + published.set(true); + latch.countDown(); + } + }, + new ClusterStateTaskListener() { + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + throw new IllegalStateException(source); + } + + @Override + public void onFailure(String source, Throwable t) { + } + } + ); + + latch.await(); + assertTrue(published.get()); + } + public void testClusterStateBatchedUpdates() throws InterruptedException { Settings settings = settingsBuilder() .put("discovery.type", "local") @@ -751,23 +816,40 @@ public class ClusterServiceIT extends ESIntegTestCase { } } + int numberOfThreads = randomIntBetween(2, 8); + int tasksSubmittedPerThread = randomIntBetween(1, 1024); + int numberOfExecutors = Math.max(1, numberOfThreads / 4); + final Semaphore semaphore = new Semaphore(numberOfExecutors); + class TaskExecutor implements ClusterStateTaskExecutor { private AtomicInteger counter = new AtomicInteger(); + private AtomicInteger batches = new AtomicInteger(); + private AtomicInteger published = new AtomicInteger(); @Override public BatchResult execute(ClusterState currentState, List tasks) throws Exception { tasks.forEach(task -> task.execute()); counter.addAndGet(tasks.size()); - return BatchResult.builder().successes(tasks).build(currentState); + ClusterState maybeUpdatedClusterState = currentState; + if (randomBoolean()) { + maybeUpdatedClusterState = ClusterState.builder(currentState).build(); + batches.incrementAndGet(); + semaphore.acquire(); + } + return BatchResult.builder().successes(tasks).build(maybeUpdatedClusterState); } @Override public boolean runOnlyOnMaster() { return false; } + + @Override + public void clusterStatePublished(ClusterState newClusterState) { + published.incrementAndGet(); + semaphore.release(); + } } - int numberOfThreads = randomIntBetween(2, 8); - int tasksSubmittedPerThread = randomIntBetween(1, 1024); ConcurrentMap counters = new ConcurrentHashMap<>(); CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread); @@ -784,7 +866,6 @@ public class ClusterServiceIT extends ESIntegTestCase { } }; - int numberOfExecutors = Math.max(1, numberOfThreads / 4); List executors = new ArrayList<>(); for (int i = 0; i < numberOfExecutors; i++) { executors.add(new TaskExecutor()); @@ -803,33 +884,43 @@ public class ClusterServiceIT extends ESIntegTestCase { counts.merge(executor, 1, (previous, one) -> previous + one); } - CountDownLatch startingGun = new CountDownLatch(1 + numberOfThreads); - List threads = new ArrayList<>(); + CountDownLatch startGate = new CountDownLatch(1); + CountDownLatch endGate = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); for (int i = 0; i < numberOfThreads; i++) { final int index = i; Thread thread = new Thread(() -> { - startingGun.countDown(); - for (int j = 0; j < tasksSubmittedPerThread; j++) { - ClusterStateTaskExecutor 
executor = assignments.get(index * tasksSubmittedPerThread + j); - clusterService.submitStateUpdateTask( - Thread.currentThread().getName(), - new Task(), - ClusterStateTaskConfig.build(randomFrom(Priority.values())), - executor, - listener); + try { + try { + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; + } + for (int j = 0; j < tasksSubmittedPerThread; j++) { + ClusterStateTaskExecutor executor = assignments.get(index * tasksSubmittedPerThread + j); + clusterService.submitStateUpdateTask( + Thread.currentThread().getName(), + new Task(), + ClusterStateTaskConfig.build(randomFrom(Priority.values())), + executor, + listener); + } + } finally { + endGate.countDown(); } }); - threads.add(thread); thread.start(); } - startingGun.countDown(); - for (Thread thread : threads) { - thread.join(); - } + startGate.countDown(); + endGate.await(); + assertFalse(interrupted.get()); // wait until all the cluster state updates have been processed updateLatch.await(); + // and until all of the publication callbacks have completed + semaphore.acquire(numberOfExecutors); // assert the number of executed tasks is correct assertEquals(numberOfThreads * tasksSubmittedPerThread, counter.get()); @@ -838,6 +929,7 @@ public class ClusterServiceIT extends ESIntegTestCase { for (TaskExecutor executor : executors) { if (counts.containsKey(executor)) { assertEquals((int) counts.get(executor), executor.counter.get()); + assertEquals(executor.batches.get(), executor.published.get()); } } @@ -940,7 +1032,7 @@ public class ClusterServiceIT extends ESIntegTestCase { public void testLongClusterStateUpdateLogging() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "local") - .put(InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD, "10s") + .put(InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey(), "10s") .build(); internalCluster().startNode(settings); ClusterService clusterService1 = internalCluster().getInstance(ClusterService.class); @@ -976,7 +1068,7 @@ public class ClusterServiceIT extends ESIntegTestCase { processedFirstTask.await(1, TimeUnit.SECONDS); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() - .put(InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD, "10ms"))); + .put(InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey(), "10ms"))); clusterService1.submitStateUpdateTask("test2", new ClusterStateUpdateTask() { @Override diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 8d4540aad3b..faa0f15d8f2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -23,10 +23,21 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.*; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.metadata.RepositoriesMetaData; +import org.elasticsearch.cluster.metadata.SnapshotId; import 
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.ImmutableOpenMap; diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 648356be173..2d726d97424 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -39,7 +39,11 @@ import org.elasticsearch.test.disruption.NetworkDelaysPartition; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; -import java.util.*; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -47,8 +51,14 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.isOneOf; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) @ESIntegTestCase.SuppressLocalMode @@ -280,7 +290,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { assertNoMasterBlockOnAllNodes(); logger.info("--> bringing another node up"); - internalCluster().startNode(settingsBuilder().put(settings).put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2).build()); + internalCluster().startNode(settingsBuilder().put(settings).put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2).build()); clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").get(); assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); } @@ -317,7 +327,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { // set an initial value which is at least quorum to avoid split brains during initial startup int initialMinMasterNodes = randomIntBetween(nodeCount / 2 + 1, nodeCount); - settings.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, initialMinMasterNodes); + settings.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), initialMinMasterNodes); logger.info("--> starting [{}] 
nodes. min_master_nodes set to [{}]", nodeCount, initialMinMasterNodes); @@ -328,19 +338,21 @@ int updateCount = randomIntBetween(1, nodeCount); - logger.info("--> updating [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, updateCount); + logger.info("--> updating [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), updateCount); assertAcked(client().admin().cluster().prepareUpdateSettings() - .setPersistentSettings(settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, updateCount))); + .setPersistentSettings(settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), updateCount))); logger.info("--> verifying no node left and master is up"); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(nodeCount)).get().isTimedOut()); updateCount = nodeCount + randomIntBetween(1, 2000); - logger.info("--> trying to updating [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, updateCount); - assertThat(client().admin().cluster().prepareUpdateSettings() - .setPersistentSettings(settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, updateCount)) - .get().getPersistentSettings().getAsMap().keySet(), - empty()); + logger.info("--> trying to update [{}] to [{}]", ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), updateCount); + try { + client().admin().cluster().prepareUpdateSettings() + .setPersistentSettings(settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), updateCount)).get(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "cannot set discovery.zen.minimum_master_nodes to more than the current master nodes count [" + updateCount + "]"); + } logger.info("--> verifying no node left and master is up"); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(nodeCount)).get().isTimedOut()); @@ -351,8 +363,8 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { .put("discovery.type", "zen") .put(FaultDetection.SETTING_PING_TIMEOUT, "1h") // disable it .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2) - .put(DiscoverySettings.COMMIT_TIMEOUT, "100ms") // speed things up + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) + .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "100ms") // speed things up .build(); internalCluster().startNodesAsync(3, settings).get(); ensureGreen(); // ensure cluster state is recovered before we disrupt things diff --git a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index e0f8b2cb840..8e5479d6f84 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -67,7 +67,7 @@ public class NoMasterNodeIT extends ESIntegTestCase { .put("discovery.zen.minimum_master_nodes", 2) .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") .put("discovery.initial_state_timeout", "500ms") - .put(DiscoverySettings.NO_MASTER_BLOCK, "all") + .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all") .build(); TimeValue timeout = TimeValue.timeValueMillis(200); @@ -219,7 +219,7 @@ public class NoMasterNodeIT extends ESIntegTestCase {
.put("discovery.zen.minimum_master_nodes", 2) .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") .put("discovery.initial_state_timeout", "500ms") - .put(DiscoverySettings.NO_MASTER_BLOCK, "write") + .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "write") .build(); internalCluster().startNode(settings); diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java index 81de8b1a43c..03cfbf2b307 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java @@ -50,8 +50,9 @@ public class AckClusterUpdateSettingsIT extends ESIntegTestCase { .put(super.nodeSettings(nodeOrdinal)) //make sure that enough concurrent reroutes can happen at the same time //we have a minimum of 2 nodes, and a maximum of 10 shards, thus 5 should be enough - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 5) - .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 10) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 5) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 5) + .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 10) .build(); } @@ -69,7 +70,7 @@ public class AckClusterUpdateSettingsIT extends ESIntegTestCase { private void removePublishTimeout() { //to test that the acknowledgement mechanism is working we better disable the wait for publish //otherwise the operation is most likely acknowledged even if it doesn't support ack - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "0"))); + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "0"))); } public void testClusterUpdateSettingsAcknowledgement() { diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java index 47517a753af..13a5cae6ca3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.ack; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; @@ -67,8 +66,8 @@ public class AckIT extends ESIntegTestCase { //to test that the acknowledgement mechanism is working we better disable the wait for publish //otherwise the operation is most likely acknowledged even if it doesn't support ack return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(DiscoverySettings.PUBLISH_TIMEOUT, 0).build(); - } + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), 0).build(); +} public void testUpdateSettingsAcknowledgement() { createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java 
b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index 96eea881e9e..4ad4a0a3d4c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -98,7 +98,7 @@ public class ShardStateActionTests extends ESTestCase { AtomicBoolean noMaster = new AtomicBoolean(); assert !noMaster.get(); - shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { @Override public void onShardFailedNoMaster() { noMaster.set(true); @@ -123,7 +123,7 @@ public class ShardStateActionTests extends ESTestCase { AtomicBoolean failure = new AtomicBoolean(); assert !failure.get(); - shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { @Override public void onShardFailedNoMaster() { @@ -156,7 +156,7 @@ public class ShardStateActionTests extends ESTestCase { TimeValue timeout = new TimeValue(1, TimeUnit.MILLISECONDS); CountDownLatch latch = new CountDownLatch(1); - shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), timeout, new ShardStateAction.Listener() { + shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), timeout, new ShardStateAction.Listener() { @Override public void onShardFailedFailure(DiscoveryNode master, TransportException e) { if (e instanceof ReceiveTimeoutTransportException) { diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java index f9151628b8a..1e9c25ed78c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.allocation; import com.carrotsearch.hppc.ObjectIntHashMap; - import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -106,9 +105,9 @@ public class AwarenessAllocationIT extends ESIntegTestCase { public void testAwarenessZones() throws Exception { Settings commonSettings = Settings.settingsBuilder() - .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP + "zone.values", "a,b") - .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, "zone") - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 3) + .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.getKey() + "zone.values", "a,b") + .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(), "zone") + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 3) .put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "10s") .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java 
b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 1605e70637e..6b406a3bfdf 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -21,9 +21,9 @@ package org.elasticsearch.cluster.allocation; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -56,7 +56,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; -import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; @@ -71,15 +71,15 @@ public class ClusterRerouteIT extends ESIntegTestCase { public void testRerouteWithCommands_disableAllocationSettings() throws Exception { Settings commonSettings = settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none") - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build(); rerouteWithCommands(commonSettings); } public void testRerouteWithCommands_enableAllocationSettings() throws Exception { Settings commonSettings = settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()) .build(); rerouteWithCommands(commonSettings); } @@ -147,22 +147,23 @@ public class ClusterRerouteIT extends ESIntegTestCase { public void testRerouteWithAllocateLocalGateway_disableAllocationSettings() throws Exception { Settings commonSettings = settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none") - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build(); rerouteWithAllocateLocalGateway(commonSettings); } public void testRerouteWithAllocateLocalGateway_enableAllocationSettings() throws Exception { Settings commonSettings = settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()) .build(); 
rerouteWithAllocateLocalGateway(commonSettings); } public void testDelayWithALargeAmountOfShards() throws Exception { Settings commonSettings = settingsBuilder() - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 1) .build(); logger.info("--> starting 4 nodes"); String node_1 = internalCluster().startNode(commonSettings); @@ -279,7 +280,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> disable allocation"); Settings newSettings = settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()) .build(); client().admin().cluster().prepareUpdateSettings().setTransientSettings(newSettings).execute().actionGet(); @@ -348,4 +349,4 @@ public class ClusterRerouteIT extends ESIntegTestCase { setClusterReadOnly(false); } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java index 1faa82ad1b8..89a7f8ad65c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import java.io.IOException; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.test.ESIntegTestCase.*; +import static org.elasticsearch.test.ESIntegTestCase.Scope; import static org.hamcrest.Matchers.instanceOf; @ClusterScope(scope= Scope.TEST, numDataNodes =0) diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index a4d5a6f4a75..3562fa313ba 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -37,7 +37,10 @@ import java.io.IOException; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ClusterStateHealthTests extends ESTestCase { private final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); @@ -116,4 +119,4 @@ public class ClusterStateHealthTests extends ESTestCase { assertThat(clusterStateHealth.getValidationFailures(), empty()); assertThat(clusterStateHealth.getActiveShardsPercent(), is(allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(100.0)))); } -} \ No newline at end of file +}
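The ThrottlingAllocationDecider hunks above (AckClusterUpdateSettingsIT and ClusterRerouteIT) also reflect that the single concurrent-recoveries throttle has been split into separate incoming and outgoing limits, with `node_concurrent_recoveries` kept as one knob that seeds both directions. A minimal sketch of the two ways a test can migrate, assuming the key strings implied by the constant names in this diff:

import org.elasticsearch.common.settings.Settings;

class RecoveryThrottleExample {
    static Settings throttled() {
        return Settings.builder()
                // one knob that seeds both directions, replacing the removed concurrent_recoveries key
                .put("cluster.routing.allocation.node_concurrent_recoveries", 5)
                // or tune each direction independently
                .put("cluster.routing.allocation.node_concurrent_incoming_recoveries", 5)
                .put("cluster.routing.allocation.node_concurrent_outgoing_recoveries", 5)
                .build();
    }
}

diff --git a/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java b/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java index 730763372ca..5f48c5abde9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java +++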
b/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java @@ -21,7 +21,11 @@ package org.elasticsearch.cluster.health; import com.carrotsearch.randomizedtesting.RandomizedContext; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.index.shard.ShardId; class RoutingTableGenerator { diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java deleted file mode 100644 index e4462007c9a..00000000000 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java +++ /dev/null @@ -1,340 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.cluster.metadata; - -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; -import org.elasticsearch.test.ESTestCase; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -public class MappingMetaDataParserTests extends ESTestCase { - public void testParseIdAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "1"); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.idResolved(), equalTo(true)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), nullValue()); - assertThat(parseContext.timestampResolved(), equalTo(false)); - } - - public void testFailIfIdIsNoValue() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startArray("id").value("id").endArray().field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "1"); - try { - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - fail(); - } catch (MapperParsingException ex) { - // bogus its an array - } - - bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("id").field("x", "id").endObject().field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - parseContext = md.createParseContext(null, "routing_value", "1"); - try { - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - fail(); - } catch (MapperParsingException ex) { - // bogus its an object - } - } - - public void testParseRoutingAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - 
MappingMetaData.ParseContext parseContext = md.createParseContext("id", null, "1"); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.idResolved(), equalTo(false)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), nullValue()); - assertThat(parseContext.timestampResolved(), equalTo(false)); - } - - public void testParseTimestampAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext("id", "routing_value1", null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.idResolved(), equalTo(false)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), equalTo("1")); - assertThat(parseContext.timestampResolved(), equalTo(true)); - } - - public void testParseTimestampEquals() throws Exception { - MappingMetaData md1 = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - MappingMetaData md2 = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - assertThat(md1, equalTo(md2)); - } - - public void testParseIdAndRoutingAndTimestamp() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "routing"), - new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdAndRoutingAndTimestampWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", 
TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").field("routing", "routing_value").endObject() - .startObject("obj2").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").field("routing", "routing_value").endObject() - .startObject("obj2").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "2"); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.idResolved(), equalTo(true)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), nullValue()); - assertThat(parseContext.timestampResolved(), equalTo(false)); - } - - public void testParseRoutingWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").field("routing", "routing_value").endObject() - .startObject("obj2").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext("id", null, "2"); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.idResolved(), equalTo(false)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), nullValue()); - assertThat(parseContext.timestampResolved(), equalTo(false)); - } - - public void testParseTimestampWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, 
"obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("routing", "routing_value").endObject() - .startObject("obj2").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value1", null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.idResolved(), equalTo(false)); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.routingResolved(), equalTo(true)); - assertThat(parseContext.timestamp(), equalTo("1")); - assertThat(parseContext.timestampResolved(), equalTo(true)); - } - - public void testParseIdAndRoutingAndTimestampWithinSamePath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject() - .startObject("obj2").field("field1", "value1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdAndRoutingAndTimestampWithinSamePathAndMoreLevels() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("obj1.obj0.id"), - new MappingMetaData.Routing(true, "obj1.obj2.routing"), - new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1") - .startObject("obj0") - .field("id", "id") - .endObject() - .startObject("obj2") - .field("routing", "routing_value") - .endObject() - .startObject("obj3") - .field("timestamp", "1") - .endObject() - .endObject() - .startObject("obj2").field("field1", "value1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdAndRoutingAndTimestampWithSameRepeatedObject() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - 
new MappingMetaData.Id("obj1.id"), - new MappingMetaData.Routing(true, "obj1.routing"), - new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2") - .startObject("obj0").field("field1", "value1").field("field2", "value2").endObject() - .startObject("obj1").field("id", "id").endObject() - .startObject("obj1").field("routing", "routing_value").endObject() - .startObject("obj1").field("timestamp", "1").endObject() - .endObject().bytes().toBytes(); - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("id")); - assertThat(parseContext.routing(), equalTo("routing_value")); - assertThat(parseContext.timestamp(), equalTo("1")); - } - - public void testParseIdRoutingTimestampWithRepeatedField() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("field1"), - new MappingMetaData.Routing(true, "field1.field1"), - new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - - byte[] bytes = jsonBuilder().startObject() - .field("aaa", "wr") - .array("arr1", "1", "2", "3") - .field("field1", "foo") - .field("field1", "bar") - .field("test", "value") - .field("zzz", "wr") - .endObject().bytes().toBytes(); - - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), equalTo("foo")); - assertThat(parseContext.routing(), nullValue()); - assertThat(parseContext.timestamp(), equalTo("foo")); - } - - public void testParseNoIdRoutingWithRepeatedFieldAndObject() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id("id"), - new MappingMetaData.Routing(true, "field1.field1.field2"), - new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - - byte[] bytes = jsonBuilder().startObject() - .field("aaa", "wr") - .array("arr1", "1", "2", "3") - .field("field1", "foo") - .startObject("field1").field("field2", "bar").endObject() - .field("test", "value") - .field("zzz", "wr") - .endObject().bytes().toBytes(); - - MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.routing(), nullValue()); - assertThat(parseContext.timestamp(), equalTo("foo")); - } - - public void testParseRoutingWithRepeatedFieldAndValidRouting() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), - new MappingMetaData.Id(null), - new MappingMetaData.Routing(true, "field1.field2"), - new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - - byte[] bytes = jsonBuilder().startObject() - .field("aaa", "wr") - .array("arr1", "1", "2", "3") - .field("field1", "foo") - .startObject("field1").field("field2", "bar").endObject() - .field("test", "value") - .field("zzz", "wr") - .endObject().bytes().toBytes(); - 
- MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null); - md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext); - assertThat(parseContext.id(), nullValue()); - assertThat(parseContext.routing(), equalTo("bar")); - assertThat(parseContext.timestamp(), equalTo("foo")); - } -} diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 59116859322..91a421ee420 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class MetaDataTests extends ESTestCase { @@ -41,4 +42,72 @@ public class MetaDataTests extends ESTestCase { } } + public void testResolveIndexRouting() { + IndexMetaData.Builder builder = IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias0").build()) + .putAlias(AliasMetaData.builder("alias1").routing("1").build()) + .putAlias(AliasMetaData.builder("alias2").routing("1,2").build()); + MetaData metaData = MetaData.builder().put(builder).build(); + + // no alias, no index + assertEquals(metaData.resolveIndexRouting(null, null, null), null); + assertEquals(metaData.resolveIndexRouting(null, "0", null), "0"); + assertEquals(metaData.resolveIndexRouting("32", "0", null), "0"); + assertEquals(metaData.resolveIndexRouting("32", null, null), "32"); + + // index, no alias + assertEquals(metaData.resolveIndexRouting("32", "0", "index"), "0"); + assertEquals(metaData.resolveIndexRouting("32", null, "index"), "32"); + assertEquals(metaData.resolveIndexRouting(null, null, "index"), null); + assertEquals(metaData.resolveIndexRouting(null, "0", "index"), "0"); + + // alias with no index routing + assertEquals(metaData.resolveIndexRouting(null, null, "alias0"), null); + assertEquals(metaData.resolveIndexRouting(null, "0", "alias0"), "0"); + assertEquals(metaData.resolveIndexRouting("32", null, "alias0"), "32"); + assertEquals(metaData.resolveIndexRouting("32", "0", "alias0"), "0"); + + // alias with index routing. + assertEquals(metaData.resolveIndexRouting(null, null, "alias1"), "1"); + assertEquals(metaData.resolveIndexRouting("32", null, "alias1"), "1"); + assertEquals(metaData.resolveIndexRouting("32", "1", "alias1"), "1"); + try { + metaData.resolveIndexRouting(null, "0", "alias1"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation")); + } + + try { + metaData.resolveIndexRouting("32", "0", "alias1"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation")); + } + + // alias with invalid index routing. 
+ try { + metaData.resolveIndexRouting(null, null, "alias2"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + } + + try { + metaData.resolveIndexRouting(null, "1", "alias2"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + } + + try { + metaData.resolveIndexRouting("32", null, "alias2"); + fail("should fail"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java index 9a91e1cd562..0315f1568af 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java @@ -29,7 +29,11 @@ import org.junit.BeforeClass; import java.net.InetAddress; import java.net.UnknownHostException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java new file mode 100644 index 00000000000..340fdcc3c99 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -0,0 +1,122 @@ +package org.elasticsearch.cluster.routing; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.disruption.NetworkDisconnectPartition; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +@ESIntegTestCase.SuppressLocalMode +public class PrimaryAllocationIT extends ESIntegTestCase { + + public void testDoNotAllowStaleReplicasToBePromotedToPrimary() throws Exception { + logger.info("--> starting 3 nodes, 1 master, 2 data"); + String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); + internalCluster().startDataOnlyNodesAsync(2).get(); + + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", 1).put("index.number_of_replicas", 1)).get()); + ensureGreen(); + logger.info("--> indexing..."); + client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + refresh(); + + ClusterState state = client().admin().cluster().prepareState().all().get().getState(); + List<ShardRouting> shards = state.routingTable().allShards("test"); + assertThat(shards.size(), equalTo(2)); + + final String primaryNode; + final String replicaNode; + if (shards.get(0).primary()) { + primaryNode = state.getRoutingNodes().node(shards.get(0).currentNodeId()).node().name(); + replicaNode = state.getRoutingNodes().node(shards.get(1).currentNodeId()).node().name(); + } else { + primaryNode = state.getRoutingNodes().node(shards.get(1).currentNodeId()).node().name(); + replicaNode = state.getRoutingNodes().node(shards.get(0).currentNodeId()).node().name(); + } + + NetworkDisconnectPartition partition = new NetworkDisconnectPartition( + new HashSet<>(Arrays.asList(master, replicaNode)), Collections.singleton(primaryNode), random()); + internalCluster().setDisruptionScheme(partition); + logger.info("--> partitioning node with primary shard from rest of cluster"); + partition.startDisrupting(); + + ensureStableCluster(2, master); + + logger.info("--> index a document into previous replica shard (that is now primary)"); + client(replicaNode).prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + + logger.info("--> shut down node that has new acknowledged document"); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode)); + + ensureStableCluster(1, master); + + partition.stopDisrupting(); + + logger.info("--> waiting for node with old primary shard to rejoin the cluster"); + ensureStableCluster(2, master); + + logger.info("--> check that old primary shard does not get promoted to primary again"); + // kick reroute and wait for all shard states to be fetched + client(master).admin().cluster().prepareReroute().get(); + assertBusy(() -> assertThat(internalCluster().getInstance(GatewayAllocator.class,
master).getNumberOfInFlightFetch(), equalTo(0))); + // kick reroute a second time and check that all shards are unassigned + assertThat(client(master).admin().cluster().prepareReroute().get().getState().getRoutingNodes().unassigned().size(), equalTo(2)); + + logger.info("--> starting node that reuses data folder with the up-to-date primary shard"); + internalCluster().startDataOnlyNode(Settings.EMPTY); + + logger.info("--> check that the up-to-date primary shard gets promoted and that documents are available"); + ensureYellow("test"); + assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l); + } + + public void testNotWaitForQuorumCopies() throws Exception { + logger.info("--> starting 3 nodes"); + internalCluster().startNodesAsync(3).get(); + logger.info("--> creating index with 1 primary and 2 replicas"); + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", randomIntBetween(1, 3)).put("index.number_of_replicas", 2)).get()); + ensureGreen("test"); + client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + logger.info("--> removing 2 nodes from cluster"); + internalCluster().stopRandomDataNode(); + internalCluster().stopRandomDataNode(); + internalCluster().fullRestart(); + logger.info("--> checking that index still gets allocated with only 1 shard copy being available"); + ensureYellow("test"); + assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 1l); + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java index 47ae3e68580..72ecc171eed 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java @@ -19,7 +19,9 @@ package org.elasticsearch.cluster.routing; -import static org.elasticsearch.test.ESTestCase.*; +import static org.elasticsearch.test.ESTestCase.randomAsciiOfLength; +import static org.elasticsearch.test.ESTestCase.randomFrom; +import static org.elasticsearch.test.ESTestCase.randomInt; /** * Utility class the makes random modifications to ShardRouting @@ -30,7 +32,7 @@ public final class RandomShardRoutingMutator { } public static void randomChange(ShardRouting shardRouting, String[] nodes) { - switch (randomInt(3)) { + switch (randomInt(2)) { case 0: if (shardRouting.unassigned() == false) { shardRouting.moveToUnassigned(new UnassignedInfo(randomReason(), randomAsciiOfLength(10))); @@ -44,13 +46,6 @@ public final class RandomShardRoutingMutator { } break; case 2: - if (shardRouting.primary()) { - shardRouting.moveFromPrimary(); - } else { - shardRouting.moveToPrimary(); - } - break; - case 3: if (shardRouting.initializing()) { shardRouting.moveToStarted(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java index e8be4e34ae0..5ff4a328ef6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java @@ -53,6 +53,10 @@ public class RoutingBackwardCompatibilityTests extends ESTestCase { OperationRouting operationRouting = new 
OperationRouting(Settings.EMPTY, null); for (Version version : VersionUtils.allVersions()) { + if (version.onOrAfter(Version.V_2_0_0) == false) { + // unsupported version, no need to test + continue; + } final Settings settings = settings(version).build(); IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(settings).numberOfShards(numberOfShards).numberOfReplicas(randomInt(3)).build(); MetaData.Builder metaData = MetaData.builder().put(indexMetaData, false); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index d69264a1e3a..713bf0aa311 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -50,8 +50,8 @@ public class RoutingTableTests extends ESAllocationTestCase { private int totalNumberOfShards; private final static Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); private final AllocationService ALLOCATION_SERVICE = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", Integer.MAX_VALUE) // don't limit recoveries + .put("cluster.routing.allocation.node_initial_primaries_recoveries", Integer.MAX_VALUE) .build()); private ClusterState clusterState; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 3288b92cb8e..20a731b0153 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -39,8 +39,13 @@ import org.elasticsearch.test.ESAllocationTestCase; import java.util.Collections; import java.util.EnumSet; -import static org.elasticsearch.cluster.routing.ShardRoutingState.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; /** */ diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java index 7a7f4722e97..2e54512b95f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java @@ -1,5 +1,24 @@ package org.elasticsearch.cluster.routing.allocation; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java index ee8bd067008..91ba1f4999c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java @@ -50,7 +50,7 @@ public class AddIncrementallyTests extends ESAllocationTestCase { public void testAddNodesAndIndices() { Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); AllocationService service = createAllocationService(settings.build()); ClusterState clusterState = initCluster(service, 1, 3, 3, 1); @@ -93,7 +93,7 @@ public class AddIncrementallyTests extends ESAllocationTestCase { public void testMinimalRelocations() { Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()) + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()) .put("cluster.routing.allocation.node_concurrent_recoveries", 2); AllocationService service = createAllocationService(settings.build()); @@ -161,7 +161,7 @@ public class AddIncrementallyTests extends ESAllocationTestCase { public void testMinimalRelocationsNoLimit() { Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()) + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()) .put("cluster.routing.allocation.node_concurrent_recoveries", 100) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100); AllocationService service = createAllocationService(settings.build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 6ac2b7df9ca..4c4fa72a6ec 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -54,7 +54,7 @@ public class AllocationCommandsTests 
extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(AllocationCommandsTests.class); public void testMoveShardCommand() { - AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("creating an index with 1 shard, no replica"); MetaData metaData = MetaData.builder() @@ -98,8 +98,8 @@ public class AllocationCommandsTests extends ESAllocationTestCase { public void testAllocateCommand() { AllocationService allocation = createAllocationService(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none") - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build()); logger.info("--> building initial routing table"); @@ -186,8 +186,8 @@ public class AllocationCommandsTests extends ESAllocationTestCase { public void testCancelCommand() { AllocationService allocation = createAllocationService(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none") - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build()); logger.info("--> building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java index d7a049d1b92..52aad66776e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java @@ -38,9 +38,10 @@ public class AllocationPriorityTests extends ESAllocationTestCase { */ public void testPrioritizedIndicesAllocatedFirst() { AllocationService allocation = createAllocationService(settingsBuilder(). 
- put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, 1) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 1) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 1).build()); + put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey(), 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 1).build()); final String highPriorityName; final String lowPriorityName; final int priorityFirst; @@ -84,7 +85,7 @@ public class AllocationPriorityTests extends ESAllocationTestCase { routingTable = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); - assertEquals(2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size()); + assertEquals(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).toString(),2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size()); assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index()); assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index 7be6037cf79..eb94b6de109 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -54,8 +54,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded1() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -123,7 +123,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded2() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -193,7 +193,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) 
.put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.awareness.attributes", "rack_id") .put("cluster.routing.allocation.balance.index", 0.0f) @@ -293,7 +293,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -386,8 +386,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded5() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -464,8 +464,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testMoveShardOnceNewNodeWithAttributeAdded6() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -544,8 +544,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testFullAwareness1() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -611,8 +611,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testFullAwareness2() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + 
.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); @@ -681,7 +681,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") @@ -767,7 +767,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { .put("cluster.routing.allocation.awareness.attributes", "zone") .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -827,8 +827,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { public void testUnassignedShardsWithUnbalancedZones() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "zone") .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index 1092b2ede19..08cbdc09fe0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; - +import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.EmptyClusterInfoService; @@ -39,8 +39,8 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import org.hamcrest.Matchers; @@ -65,10 +65,10 
@@ public class BalanceConfigurationTests extends ESAllocationTestCase { final float balanceTreshold = 1.0f; Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); - settings.put(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, indexBalance); - settings.put(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, replicaBalance); - settings.put(BalancedShardsAllocator.SETTING_THRESHOLD, balanceTreshold); + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), indexBalance); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), replicaBalance); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceTreshold); AllocationService strategy = createAllocationService(settings.build()); @@ -90,10 +90,10 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { final float balanceTreshold = 1.0f; Settings.Builder settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); - settings.put(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, indexBalance); - settings.put(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, replicaBalance); - settings.put(BalancedShardsAllocator.SETTING_THRESHOLD, balanceTreshold); + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), indexBalance); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), replicaBalance); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceTreshold); AllocationService strategy = createAllocationService(settings.build()); @@ -279,36 +279,30 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public void testPersistedSettings() { Settings.Builder settings = settingsBuilder(); - settings.put(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, 0.2); - settings.put(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, 0.3); - settings.put(BalancedShardsAllocator.SETTING_THRESHOLD, 2.0); - final NodeSettingsService.Listener[] listeners = new NodeSettingsService.Listener[1]; - NodeSettingsService service = new NodeSettingsService(settingsBuilder().build()) { - - @Override - public void addListener(Listener listener) { - assertNull("addListener was called twice while only one time was expected", listeners[0]); - listeners[0] = listener; - } - - }; + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.2); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.3); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 2.0); + ClusterSettings service = new ClusterSettings(settingsBuilder().build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); BalancedShardsAllocator allocator = new BalancedShardsAllocator(settings.build(), service); assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.2f)); assertThat(allocator.getShardBalance(), Matchers.equalTo(0.3f)); 
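In testPersistedSettings, the hand-rolled NodeSettingsService subclass that captured its Listener is replaced by a real ClusterSettings instance seeded with the built-in cluster settings, and dynamic updates are now driven through applySettings(...) instead of invoking the captured listener, as the continuation of this hunk shows. A condensed sketch of the new flow, using only calls that appear in this patch:

    // ClusterSettings registers the allocator's dynamic-setting consumers
    ClusterSettings service = new ClusterSettings(settingsBuilder().build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
    BalancedShardsAllocator allocator = new BalancedShardsAllocator(settings.build(), service);

    // simulate a dynamic settings update; the registered consumers receive the new values
    service.applySettings(settingsBuilder()
        .put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 3.0)
        .build());
    assertThat(allocator.getThreshold(), Matchers.equalTo(3.0f));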
assertThat(allocator.getThreshold(), Matchers.equalTo(2.0f)); settings = settingsBuilder(); - settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); - listeners[0].onRefreshSettings(settings.build()); + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.2); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.3); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 2.0); + settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); + service.applySettings(settings.build()); assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.2f)); assertThat(allocator.getShardBalance(), Matchers.equalTo(0.3f)); assertThat(allocator.getThreshold(), Matchers.equalTo(2.0f)); settings = settingsBuilder(); - settings.put(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, 0.5); - settings.put(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, 0.1); - settings.put(BalancedShardsAllocator.SETTING_THRESHOLD, 3.0); - listeners[0].onRefreshSettings(settings.build()); + settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.5); + settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 0.1); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), 3.0); + service.applySettings(settings.build()); assertThat(allocator.getIndexBalance(), Matchers.equalTo(0.5f)); assertThat(allocator.getShardBalance(), Matchers.equalTo(0.1f)); assertThat(allocator.getThreshold(), Matchers.equalTo(3.0f)); @@ -317,7 +311,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public void testNoRebalanceOnPrimaryOverload() { Settings.Builder settings = settingsBuilder(); AllocationService strategy = new AllocationService(settings.build(), randomAllocationDeciders(settings.build(), - new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), getRandom()), new ShardsAllocators(settings.build(), + new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), new ShardsAllocators(settings.build(), NoopGatewayAllocator.INSTANCE, new ShardsAllocator() { @Override @@ -365,7 +359,9 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public boolean allocateUnassigned(RoutingAllocation allocation) { RoutingNodes.UnassignedShards unassigned = allocation.routingNodes().unassigned(); boolean changed = !unassigned.isEmpty(); - for (ShardRouting sr : unassigned.drain()) { + ShardRouting[] drain = unassigned.drain(); + ArrayUtil.timSort(drain, (a, b) -> { return a.primary() ? 
-1 : 1; }); // we have to allocate primaries first + for (ShardRouting sr : drain) { switch (sr.id()) { case 0: if (sr.primary()) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index 8dad41db2f8..fa9f4065dc1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -37,7 +37,10 @@ import org.elasticsearch.test.gateway.NoopGatewayAllocator; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.cluster.routing.ShardRoutingState.*; +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -46,7 +49,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(ClusterRebalanceRoutingTests.class); public void testAlways() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build()); MetaData metaData = MetaData.builder() @@ -132,7 +135,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { public void testClusterPrimariesActive1() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_PRIMARIES_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -236,7 +239,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { } public void testClusterPrimariesActive2() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_PRIMARIES_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -320,7 +323,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { } public void testClusterAllActive1() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = 
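The sort added to the stub ShardsAllocator above orders the drained unassigned shards primaries-first, because the stub's switch-based assignments assume each primary is placed before its replica. Note that the lambda (a, b) -> a.primary() ? -1 : 1 never returns 0 and reports both a < b and b < a whenever the two shards are both primaries (or both replicas), which violates the Comparator contract; Lucene's timSort happens to work here, but a contract-safe equivalent would be:

    // same primaries-first ordering, expressed as a valid total order
    ArrayUtil.timSort(drain, (a, b) -> Boolean.compare(b.primary(), a.primary()));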
createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_ALL_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -443,7 +446,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { } public void testClusterAllActive2() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_ALL_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -527,7 +530,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { } public void testClusterAllActive3() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.INDICES_ALL_ACTIVE.toString()).build()); MetaData metaData = MetaData.builder() @@ -737,7 +740,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { public void testRebalanceWhileShardFetching() { final AtomicBoolean hasFetches = new AtomicBoolean(true); - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build(), new NoopGatewayAllocator() { @Override public boolean allocateUnassigned(RoutingAllocation allocation) { @@ -823,4 +826,4 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { assertEquals(numRelocating, 1); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java index 34d78ae3099..886462610ca 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java @@ -43,7 +43,7 @@ public class ConcurrentRebalanceRoutingTests extends ESAllocationTestCase { public void testClusterConcurrentRebalance() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.cluster_concurrent_rebalance", 3) .build()); @@ -145,4 +145,4 @@ public class ConcurrentRebalanceRoutingTests extends ESAllocationTestCase { assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(10)); assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(0)); } -} \ No newline at end of file +} diff --git 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java index e16e7cc2cec..cb09fb93b60 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java @@ -45,8 +45,8 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testSimpleDeadNodeOnStartedPrimaryShard() { AllocationService allocation = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("--> building initial routing table"); @@ -96,8 +96,8 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testDeadNodeWhileRelocatingOnToNode() { AllocationService allocation = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("--> building initial routing table"); @@ -170,8 +170,8 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { public void testDeadNodeWhileRelocatingOnFromNode() { AllocationService allocation = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("--> building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java index e7c956c4ccd..fc686f0bb5a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java @@ -43,7 +43,7 @@ public class ElectReplicaAsPrimaryDuringRelocationTests extends ESAllocationTest private final ESLogger logger = Loggers.getLogger(ElectReplicaAsPrimaryDuringRelocationTests.class); public void testElectReplicaAsPrimaryDuringRelocation() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java 
b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index 3b242d8676f..affab78521c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -41,7 +41,7 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(FailedNodeRoutingTests.class); public void testSimpleFailedNodeTest() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build()); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 9bfaf7e9997..b8ab9c13590 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -56,8 +56,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFailedShardPrimaryRelocatingToAndFrom() { AllocationService allocation = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("--> building initial routing table"); @@ -144,8 +144,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFailPrimaryStartedCheckReplicaElected() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); @@ -225,8 +225,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFirstAllocationFailureSingleNode() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); @@ -281,8 +281,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testSingleShardMultipleAllocationFailures() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - 
.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); @@ -337,8 +337,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testFirstAllocationFailureTwoNodes() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); @@ -397,8 +397,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { public void testRebalanceFailure() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java index aa6fdef828a..d5f8134d95f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java @@ -48,7 +48,7 @@ public class IndexBalanceTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); @@ -178,7 +178,7 @@ public class IndexBalanceTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); @@ -340,7 +340,7 @@ public class IndexBalanceTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - 
.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 2fe1d85b1f4..2b0c7ef6bda 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -39,10 +39,15 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.cluster.routing.ShardRoutingState.*; +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; /** * @@ -52,8 +57,8 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testDoNotAllocateFromPrimary() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -166,8 +171,8 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testRandom() { AllocationService service = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -215,8 +220,8 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testRollingRestart() { AllocationService service = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); diff --git 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java index 0ac98d4f92b..d4beb7190e3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESAllocationTestCase; @@ -42,6 +43,7 @@ public class PreferPrimaryAllocationTests extends ESAllocationTestCase { logger.info("create an allocation with 1 initial recoveries"); AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 1) .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java index e994c885629..7e59ab8a6b4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java @@ -43,7 +43,7 @@ public class PrimaryElectionRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(PrimaryElectionRoutingTests.class); public void testBackupElectionToPrimaryWhenPrimaryCanBeAllocatedToAnotherNode() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); @@ -93,7 +93,7 @@ public class PrimaryElectionRoutingTests extends ESAllocationTestCase { } public void testRemovingInitializingReplicasIfPrimariesFails() { - AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java index 12ff9fd3f7d..371624484ff 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java @@ -44,6 +44,7 @@ public class 
PrimaryNotRelocatedWhileBeingRecoveredTests extends ESAllocationTes public void testPrimaryNotRelocatedWhileBeingRecoveredFrom() { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put("cluster.routing.allocation.concurrent_source_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) .build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java index 4d5f4d07ea1..abc561a0916 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESAllocationTestCase; @@ -57,7 +58,7 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase { public void testRandomDecisions() { RandomAllocationDecider randomAllocationDecider = new RandomAllocationDecider(getRandom()); AllocationService strategy = new AllocationService(settingsBuilder().build(), new AllocationDeciders(Settings.EMPTY, - new HashSet<>(Arrays.asList(new SameShardAllocationDecider(Settings.EMPTY), + new HashSet<>(Arrays.asList(new SameShardAllocationDecider(Settings.EMPTY), new ReplicaAfterPrimaryActiveAllocationDecider(Settings.EMPTY), randomAllocationDecider))), new ShardsAllocators(NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE); int indices = scaledRandomIntBetween(1, 20); Builder metaBuilder = MetaData.builder(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index fbc742573e9..4672f339c70 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -56,8 +56,8 @@ public class RebalanceAfterActiveTests extends ESAllocationTestCase { } AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), new ClusterInfoService() { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java index 
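Several of the hunks above separate the two directions of recovery throttling: "cluster.routing.allocation.node_concurrent_recoveries" caps recoveries incoming to a node, while outgoing caps (the typed ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING here, and the raw key "cluster.routing.allocation.concurrent_source_recoveries" in other hunks) limit the recoveries a node serves as the source. Tests that only want to exercise incoming throttling now raise the outgoing cap so it cannot interfere, as in this condensed excerpt from the PreferPrimaryAllocationTests hunk:

    AllocationService strategy = createAllocationService(settingsBuilder()
        // at most one recovery may target a node at a time...
        .put("cluster.routing.allocation.node_concurrent_recoveries", 1)
        // ...but a node may be the source of up to ten concurrent recoveries
        .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10)
        .put("cluster.routing.allocation.node_initial_primaries_recoveries", 1)
        .build());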
0d33b5ecd46..1b8bea26dbe 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java @@ -45,7 +45,7 @@ public class ReplicaAllocatedAfterPrimaryTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(ReplicaAllocatedAfterPrimaryTests.class); public void testBackupIsAllocatedAfterPrimary() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java index eca2a227f8f..9a4e56a26b2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESAllocationTestCase; @@ -47,7 +48,7 @@ public class RoutingNodesIntegrityTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); @@ -119,7 +120,7 @@ public class RoutingNodesIntegrityTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); @@ -211,7 +212,8 @@ public class RoutingNodesIntegrityTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 1) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 3) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + 
.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java index f096ab0b13d..e1586c433a5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java @@ -41,7 +41,7 @@ public class ShardVersioningTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(ShardVersioningTests.class); public void testSimple() { - AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, + AllocationService strategy = createAllocationService(settingsBuilder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build()); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index 11d41a6a336..dd3f3f373ff 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -46,7 +46,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(ShardsLimitAllocationTests.class); public void testIndexLevelShardsLimitAllocate() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); @@ -89,8 +89,8 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { public void testClusterLevelShardsLimitAllocate() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE, 1) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 1) .build()); logger.info("Building initial routing table"); @@ -125,8 +125,8 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { // Bump the cluster total shards to 2 strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE, 2) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 2) .build()); logger.info("Do another reroute, make sure shards are now allocated"); @@ -147,7 
+147,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { public void testIndexLevelShardsLimitRemain() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.balance.index", 0.0f) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index ed44b84a886..bf41ad8a053 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -59,7 +59,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(SingleShardNoReplicasRoutingTests.class); public void testSingleIndexStartedShard() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); @@ -160,7 +160,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { } public void testSingleIndexShardFailed() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); @@ -210,8 +210,8 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { public void testMultiIndexEvenDistribution() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -322,8 +322,8 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { public void testMultiIndexUnevenNodes() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); @@ -413,4 +413,4 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { assertThat(routingNode.numberOfShardsWithState(STARTED), equalTo(2)); } } -} \ No newline at end of file +} diff --git 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java index ff442852017..f7033ec2596 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java @@ -44,7 +44,7 @@ public class SingleShardOneReplicaRoutingTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(SingleShardOneReplicaRoutingTests.class); public void testSingleIndexFirstStartPrimaryThenBackups() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java index 28033915abe..0712e9cd02a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java @@ -51,9 +51,9 @@ public class StartedShardsRoutingTests extends ESAllocationTestCase { .nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))) .metaData(MetaData.builder().put(indexMetaData, false)); - final ShardRouting initShard = TestShardRouting.newShardRouting("test", 0, "node1", randomBoolean(), ShardRoutingState.INITIALIZING, 1); - final ShardRouting startedShard = TestShardRouting.newShardRouting("test", 1, "node2", randomBoolean(), ShardRoutingState.STARTED, 1); - final ShardRouting relocatingShard = TestShardRouting.newShardRouting("test", 2, "node1", "node2", randomBoolean(), ShardRoutingState.RELOCATING, 1); + final ShardRouting initShard = TestShardRouting.newShardRouting("test", 0, "node1", true, ShardRoutingState.INITIALIZING, 1); + final ShardRouting startedShard = TestShardRouting.newShardRouting("test", 1, "node2", true, ShardRoutingState.STARTED, 1); + final ShardRouting relocatingShard = TestShardRouting.newShardRouting("test", 2, "node1", "node2", true, ShardRoutingState.RELOCATING, 1); stateBuilder.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder("test") .addIndexShard(new IndexShardRoutingTable.Builder(initShard.shardId()).addShard(initShard).build()) .addIndexShard(new IndexShardRoutingTable.Builder(startedShard.shardId()).addShard(startedShard).build()) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java index 671cce007c9..aec81a6e063 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java @@ -50,7 +50,7 @@ public class TenShardsOneReplicaRoutingTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) 
.put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.balance.index", 0.0f) .put("cluster.routing.allocation.balance.replica", 1.0f) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index 223da88192b..1d60436d3c7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -25,11 +25,16 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; +import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESAllocationTestCase; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -103,7 +108,8 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { public void testReplicaAndPrimaryRecoveryThrottling() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 3) + .put("cluster.routing.allocation.node_concurrent_recoveries", 3) + .put("cluster.routing.allocation.concurrent_source_recoveries", 3) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 3) .build()); @@ -169,4 +175,157 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(0)); assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); } + + public void testThrottleIncomingAndOutgoing() { + Settings settings = settingsBuilder() + .put("cluster.routing.allocation.node_concurrent_recoveries", 5) + .put("cluster.routing.allocation.node_initial_primaries_recoveries", 5) + .put("cluster.routing.allocation.cluster_concurrent_rebalance", 5) + .build(); + AllocationService strategy = createAllocationService(settings); + logger.info("Building initial routing table"); + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(9).numberOfReplicas(0)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + ClusterState clusterState = 
ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + + logger.info("start one node, do reroute, only 5 should initialize"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build(); + routingTable = strategy.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(0)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(5)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(4)); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 5); + + logger.info("start initializing, all primaries should be started"); + routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(5)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(4)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + + routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + logger.info("start another 2 nodes, 5 shards should be relocating - at most 5 are allowed per node"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).put(newNode("node2")).put(newNode("node3"))).build(); + routingTable = strategy.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(4)); + assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(5)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(5)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 3); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 2); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 5); + + routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + logger.info("start the relocating shards, one more shard should relocate away from node1"); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(8)); + assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1); + } + + public void testOutgoingThrottlesAllocaiton() { + Settings 
settings = settingsBuilder() + .put("cluster.routing.allocation.node_concurrent_recoveries", 1) + .put("cluster.routing.allocation.node_initial_primaries_recoveries", 1) + .put("cluster.routing.allocation.cluster_concurrent_rebalance", 1) + .build(); + AllocationService strategy = createAllocationService(settings); + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(3).numberOfReplicas(0)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3"))).build(); + routingTable = strategy.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(0)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(3)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 1); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + + routingTable = strategy.applyStartedShards(clusterState, routingTable.shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + + RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node1").get(0).shardId(), "node1", "node2"))); + assertEquals(reroute.explanations().explanations().size(), 1); + assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.YES); + routingTable = reroute.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + + // outgoing throttles + reroute = strategy.reroute(clusterState, new 
AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node3").get(0).shardId(), "node3", "node1")), true); + assertEquals(reroute.explanations().explanations().size(), 1); + assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(2)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + + // incoming throttles + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node3").get(0).shardId(), "node3", "node2")), true); + assertEquals(reroute.explanations().explanations().size(), 1); + assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); + + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node2"), 1); + assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node3"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node1"), 1); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); + assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); + assertThat(routingTable.shardsWithState(STARTED).size(), equalTo(2)); + assertThat(routingTable.shardsWithState(INITIALIZING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(RELOCATING).size(), equalTo(1)); + assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); + + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java index 7fa27e7050c..5ff5af4e4cd 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java @@ -46,7 +46,7 @@ public class UpdateNumberOfReplicasTests extends ESAllocationTestCase { private final ESLogger logger = Loggers.getLogger(UpdateNumberOfReplicasTests.class); public void testUpdateNumberOfReplicas() { - AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build()); + AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); logger.info("Building initial routing table"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index a739f30856a..fa52503eac5 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -29,7 +29,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; @@ -48,9 +55,14 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; -import static org.elasticsearch.cluster.routing.ShardRoutingState.*; +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class DiskThresholdDeciderTests extends ESAllocationTestCase { @@ -60,9 +72,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testDiskThreshold() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.7) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.8).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.7) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.8).build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "node1", "/dev/null", 100, 10)); // 90% used @@ -95,8 +107,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -170,9 +182,9 @@ public class DiskThresholdDeciderTests 
extends ESAllocationTestCase { // Set the high threshold to 70 instead of 80 // node2 now should not have new shards allocated to it, but shards can remain diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "60%") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.7).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "60%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.7).build(); deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -180,8 +192,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new DiskThresholdDecider(diskSettings)))); strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -201,9 +213,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // Set the high threshold to 60 instead of 70 // node2 now should not have new shards allocated to it, and shards cannot remain diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.5) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.6).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.5) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.6).build(); deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -211,8 +223,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new DiskThresholdDecider(diskSettings)))); strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -254,9 +266,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testDiskThresholdWithAbsoluteSizes() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "30b") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "9b").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + 
.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "30b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "9b").build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 10)); // 90% used @@ -291,8 +303,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -348,8 +360,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -405,9 +417,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // Set the high threshold to 70 instead of 80 // node2 now should not have new shards allocated to it, but shards can remain diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "40b") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "30b").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "40b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "30b").build(); deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -415,8 +427,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new DiskThresholdDecider(diskSettings)))); strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -436,9 +448,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // Set the high threshold to 60 instead of 70 // node2 now should not have new shards allocated to it, and shards cannot remain diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "50b") - 
.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "40b").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "50b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "40b").build(); deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -446,8 +458,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new DiskThresholdDecider(diskSettings)))); strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -522,9 +534,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testDiskThresholdWithShardSizes() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.7) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "71%").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.7) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "71%").build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 31)); // 69% used @@ -555,8 +567,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -589,9 +601,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testUnknownDiskUsage() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.7) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.85).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.7) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.85).build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node2", new DiskUsage("node2", "node2", "/dev/null", 100, 50)); // 50% used @@ -623,8 
+635,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -688,10 +700,10 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testShardRelocationsTakenIntoAccount() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, 0.7) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.8).build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), 0.7) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.8).build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 40)); // 60% used @@ -726,8 +738,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { }; AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); @@ -794,10 +806,10 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testCanRemainWithShardRelocatingAway() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "60%") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "70%").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "60%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "70%").build(); // We have an index with 2 primary shards each taking 40 bytes. 
Each node has 100 bytes available ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); @@ -888,8 +900,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { new SameShardAllocationDecider(Settings.EMPTY), diskThresholdDecider ))); AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); // Ensure that the reroute call doesn't alter the routing table, since the first primary is relocating away @@ -906,10 +918,10 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { public void testForSingleDataNode() { Settings diskSettings = settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "60%") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "70%").build(); + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.getKey(), true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "60%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "70%").build(); ImmutableOpenMap.Builder usagesBuilder = ImmutableOpenMap.builder(); usagesBuilder.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 100)); // 0% used @@ -988,8 +1000,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ))); AllocationService strategy = new AllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") + .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build(), deciders, makeShardsAllocators(), cis); RoutingAllocation.Result result = strategy.reroute(clusterState, "reroute"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index a386883ad1b..8551af718e2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -20,20 +20,28 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterInfo; +import org.elasticsearch.cluster.ClusterInfoService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.DiskUsage; +import 
org.elasticsearch.cluster.EmptyClusterInfoService; import org.elasticsearch.cluster.MockInternalClusterInfoService.DevNullClusterInfo; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingHelper; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -45,7 +53,7 @@ import static org.hamcrest.CoreMatchers.equalTo; */ public class DiskThresholdDeciderUnitTests extends ESTestCase { public void testDynamicSettings() { - NodeSettingsService nss = new NodeSettingsService(Settings.EMPTY); + ClusterSettings nss = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); @@ -59,18 +67,15 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { assertTrue(decider.isEnabled()); assertTrue(decider.isIncludeRelocations()); - DiskThresholdDecider.ApplySettings applySettings = decider.newApplySettings(); - Settings newSettings = Settings.builder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, false) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, false) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "70%") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "500mb") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, "30s") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), false) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING.getKey(), false) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "70%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "500mb") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(), "30s") .build(); - applySettings.onRefreshSettings(newSettings); - + nss.applySettings(newSettings); assertThat("high threshold bytes should be unset", decider.getFreeBytesThresholdHigh(), equalTo(ByteSizeValue.parseBytesSizeValue("0b", "test"))); assertThat("high threshold percentage should be changed", @@ -86,7 +91,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { } public void testCanAllocateUsesMaxAvailableSpace() { - NodeSettingsService nss = new NodeSettingsService(Settings.EMPTY); + ClusterSettings nss = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); ClusterInfoService 
cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); @@ -127,7 +132,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { } public void testCanRemainUsesLeastAvailableSpace() { - NodeSettingsService nss = new NodeSettingsService(Settings.EMPTY); + ClusterSettings nss = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); ImmutableOpenMap.Builder shardRoutingMap = ImmutableOpenMap.builder(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java index 940634a4657..be64aafc61e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java @@ -37,7 +37,7 @@ public class EnableAllocationDeciderIT extends ESIntegTestCase { public void testEnableRebalance() throws InterruptedException { final String firstNode = internalCluster().startNode(); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE)).get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)).get(); // we test with 2 shards since otherwise it's pretty fragile if there are differences in the number of shards such that // all shards are relocated to the second node, which is not what we want here. It's solely a test for the settings to take effect final int numShards = 2; @@ -64,7 +64,7 @@ public class EnableAllocationDeciderIT extends ESIntegTestCase { assertThat("index: [test] expected to be rebalanced on both nodes", test.size(), equalTo(2)); // flip the cluster wide setting such that we can also balance index test_1; eventually we should have one shard of each index on each node - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, randomBoolean() ? EnableAllocationDecider.Rebalance.PRIMARIES : EnableAllocationDecider.Rebalance.ALL)).get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), randomBoolean() ? 
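+ // either PRIMARIES or ALL lifts the Rebalance.NONE restriction set at the start of the test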
EnableAllocationDecider.Rebalance.PRIMARIES : EnableAllocationDecider.Rebalance.ALL)).get(); logger.info("--> balance index [test_1]"); client().admin().cluster().prepareReroute().get(); ensureGreen("test_1"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index 0049a120777..b2559c29ed2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -20,9 +20,7 @@ package org.elasticsearch.cluster.routing.allocation.decider; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -34,8 +32,8 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Rebalance; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESAllocationTestCase; import java.util.EnumSet; @@ -44,8 +42,8 @@ import java.util.List; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; -import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE; -import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; @@ -58,7 +56,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { public void testClusterEnableNone() { AllocationService strategy = createAllocationService(settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()) .build()); logger.info("Building initial routing table"); @@ -86,7 +84,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { public void testClusterEnableOnlyPrimaries() { AllocationService strategy = createAllocationService(settingsBuilder() - .put(CLUSTER_ROUTING_ALLOCATION_ENABLE, Allocation.PRIMARIES.name()) + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.PRIMARIES.name()) .build()); logger.info("Building initial routing table"); @@ -159,11 +157,12 @@ public class EnableAllocationTests extends ESAllocationTestCase { 
final boolean useClusterSetting = randomBoolean(); final Rebalance allowedOnes = RandomPicks.randomFrom(getRandom(), EnumSet.of(Rebalance.PRIMARIES, Rebalance.REPLICAS, Rebalance.ALL)); Settings build = settingsBuilder() - .put(CLUSTER_ROUTING_REBALANCE_ENABLE, useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings - .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 3) + .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings + .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10) .build(); - NodeSettingsService nodeSettingsService = new NodeSettingsService(build); - AllocationService strategy = createAllocationService(build, nodeSettingsService, getRandom()); + ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); logger.info("Building initial routing table"); @@ -213,7 +212,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { if (useClusterSetting) { prevState = clusterState; clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).transientSettings(settingsBuilder() - .put(CLUSTER_ROUTING_REBALANCE_ENABLE, allowedOnes) + .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), allowedOnes) .build())).build(); } else { prevState = clusterState; @@ -224,7 +223,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { .build(); } - nodeSettingsService.clusterChanged(new ClusterChangedEvent("foo", clusterState, prevState)); + clusterSettings.applySettings(clusterState.metaData().settings()); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat("expected 6 shards to be started 2 to relocate useClusterSettings: " + useClusterSetting, clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(6)); @@ -261,11 +260,11 @@ public class EnableAllocationTests extends ESAllocationTestCase { public void testEnableClusterBalanceNoReplicas() { final boolean useClusterSetting = randomBoolean(); Settings build = settingsBuilder() - .put(CLUSTER_ROUTING_REBALANCE_ENABLE, useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings - .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 3) + .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? 
Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings + .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3) .build(); - NodeSettingsService nodeSettingsService = new NodeSettingsService(build); - AllocationService strategy = createAllocationService(build, nodeSettingsService, getRandom()); + ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); logger.info("Building initial routing table"); @@ -307,7 +306,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { if (useClusterSetting) { prevState = clusterState; clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).transientSettings(settingsBuilder() - .put(CLUSTER_ROUTING_REBALANCE_ENABLE, randomBoolean() ? Rebalance.PRIMARIES : Rebalance.ALL) + .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), randomBoolean() ? Rebalance.PRIMARIES : Rebalance.ALL) .build())).build(); } else { prevState = clusterState; @@ -315,7 +314,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).removeAllIndices() .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, randomBoolean() ? Rebalance.PRIMARIES : Rebalance.ALL).build()))).build(); } - nodeSettingsService.clusterChanged(new ClusterChangedEvent("foo", clusterState, prevState)); + clusterSettings.applySettings(clusterState.metaData().settings()); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat("expected 4 primaries to be started and 2 to relocate useClusterSettings: " + useClusterSetting, clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(4)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index 126799f5937..5d673e8d60c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -24,10 +24,9 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.DiskUsage; -import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.MockInternalClusterInfoService; import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -46,22 +45,12 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class MockDiskUsagesIT extends ESIntegTestCase { - @Override - 
protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - // Update more frequently - .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "1s") - .build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { // Use the mock internal cluster info service, which has fake-able disk usages return pluginList(MockInternalClusterInfoService.TestPlugin.class); } - //@TestLogging("org.elasticsearch.cluster:TRACE,org.elasticsearch.cluster.routing.allocation.decider:TRACE") public void testRerouteOccursOnDiskPassingHighWatermark() throws Exception { List<String> nodes = internalCluster().startNodesAsync(3).get(); @@ -77,15 +66,16 @@ public class MockDiskUsagesIT extends ESIntegTestCase { // Start with all nodes at 50% usage final MockInternalClusterInfoService cis = (MockInternalClusterInfoService) internalCluster().getInstance(ClusterInfoService.class, internalCluster().getMasterName()); + cis.setUpdateFrequency(TimeValue.timeValueMillis(200)); + cis.onMaster(); cis.setN1Usage(nodes.get(0), new DiskUsage(nodes.get(0), "n1", "/dev/null", 100, 50)); cis.setN2Usage(nodes.get(1), new DiskUsage(nodes.get(1), "n2", "/dev/null", 100, 50)); cis.setN3Usage(nodes.get(2), new DiskUsage(nodes.get(2), "n3", "/dev/null", 100, 50)); client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, randomFrom("20b", "80%")) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, randomFrom("10b", "90%")) - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, "1ms")).get(); - + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), randomFrom("20b", "80%")) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), randomFrom("10b", "90%")) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(), "1ms")).get(); // Create an index with 10 shards so we can check allocation for it prepareCreate("test").setSettings(settingsBuilder() .put("number_of_shards", 10) diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 65d5b0b9fcd..fba6d8127b8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.hamcrest.Matchers; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; @@ -48,22 +47,142 @@ public class ClusterSettingsIT extends ESIntegTestCase { public void testClusterNonExistingSettingsUpdate() { String key1 = "no_idea_what_you_are_talking_about"; int value1 = 10; + try { + client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(key1, value1).build()) + .get(); + fail("bogus value"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "transient setting [no_idea_what_you_are_talking_about], not dynamically updateable"); + } + } + + public void testDeleteIsAppliedFirst() { + 
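+ // the wildcard putNull() below is applied before the put in the same request, so the new timeout survives the reset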
DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class); + + assertEquals(discoverySettings.getPublishTimeout(), DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY)); + assertTrue(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY)); + + ClusterUpdateSettingsResponse response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder() + .put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false) + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s").build()) + .get(); + + assertAcked(response); + assertEquals(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), "1s"); + assertTrue(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY)); + assertFalse(response.getTransientSettings().getAsBoolean(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), null)); + + response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().putNull((randomBoolean() ? "discovery.zen.*" : "*")).put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "2s")) + .get(); + assertEquals(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), "2s"); + assertNull(response.getTransientSettings().getAsBoolean(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), null)); + } + + public void testResetClusterSetting() { + DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class); + + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); ClusterUpdateSettingsResponse response = client().admin().cluster() .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(key1, value1).build()) + .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s").build()) .get(); assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable()); + assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + + response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().putNull(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())) + .get(); + + assertAcked(response); + assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder() + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") + .put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build()) + .get(); + + assertAcked(response); + assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), 
equalTo("1s")); + assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); + assertFalse(discoverySettings.getPublishDiff()); + response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().putNull((randomBoolean() ? "discovery.zen.*" : "*"))) + .get(); + + assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey())); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + // now persistent + response = client().admin().cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s").build()) + .get(); + + assertAcked(response); + assertThat(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + + response = client().admin().cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().putNull((DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()))) + .get(); + + assertAcked(response); + assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); + + + response = client().admin().cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder() + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") + .put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build()) + .get(); + + assertAcked(response); + assertThat(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); + assertFalse(discoverySettings.getPublishDiff()); + response = client().admin().cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().putNull((randomBoolean() ? 
"discovery.zen.*" : "*"))) + .get(); + + assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey())); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); + assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); } public void testClusterSettingsUpdateResponse() { - String key1 = IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC; + String key1 = IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(); int value1 = 10; - String key2 = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE; - boolean value2 = false; + String key2 = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(); + String value2 = EnableAllocationDecider.Allocation.NONE.name(); Settings transientSettings1 = Settings.builder().put(key1, value1, ByteSizeUnit.BYTES).build(); Settings persistentSettings1 = Settings.builder().put(key2, value2).build(); @@ -114,43 +233,59 @@ public class ClusterSettingsIT extends ESIntegTestCase { assertThat(response3.getPersistentSettings().get(key2), notNullValue()); } + public void testCanUpdateTracerSettings() { + ClusterUpdateSettingsResponse clusterUpdateSettingsResponse = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().putArray("transport.tracer.include", "internal:index/shard/recovery/*", + "internal:gateway/local*")) + .get(); + assertArrayEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsArray("transport.tracer.include"), new String[] {"internal:index/shard/recovery/*", + "internal:gateway/local*"}); + } + public void testUpdateDiscoveryPublishTimeout() { DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class); - assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.DEFAULT_PUBLISH_TIMEOUT)); + assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); ClusterUpdateSettingsResponse response = client().admin().cluster() .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "1s").build()) + .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s").build()) .get(); assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT), equalTo("1s")); + assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); - response = client().admin().cluster() - .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "whatever").build()) - .get(); + try { + client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "whatever").build()) + .get(); + fail("bogus value"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.commit_timeout] with value [whatever] as a time value: unit is missing or unrecognized"); + } - assertAcked(response); - 
assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable()); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); - response = client().admin().cluster() - .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, -1).build()) - .get(); + try { + client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), -1).build()) + .get(); + fail("bogus value"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse value [-1] for setting [discovery.zen.publish_timeout] must be >= 0s"); + } - assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable()); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); } public void testClusterUpdateSettingsWithBlocks() { String key1 = "cluster.routing.allocation.enable"; - Settings transientSettings = Settings.builder().put(key1, false).build(); + Settings transientSettings = Settings.builder().put(key1, EnableAllocationDecider.Allocation.NONE.name()).build(); String key2 = "cluster.routing.allocation.node_concurrent_recoveries"; Settings persistentSettings = Settings.builder().put(key2, "5").build(); @@ -165,7 +300,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { assertBlocked(request, MetaData.CLUSTER_READ_ONLY_BLOCK); // But it's possible to update the settings to update the "cluster.blocks.read_only" setting - Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY, false).build(); + Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get()); } finally { diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index c5a695d16e5..ced1e0097a1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -224,8 +224,8 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testAttributePreferenceRouting() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id,zone") .build()); @@ -279,8 +279,8 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testNodeSelectorRouting(){ AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .build()); MetaData metaData = MetaData.builder() @@ -336,7 +336,7 @@ public class RoutingIteratorTests extends 
ESAllocationTestCase { public void testShardsAndPreferNodeRouting() { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .build()); MetaData metaData = MetaData.builder() @@ -397,7 +397,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testReplicaShardPreferenceIters() throws Exception { AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .build()); OperationRouting operationRouting = new OperationRouting(Settings.Builder.EMPTY_SETTINGS, new AwarenessAllocationDecider()); @@ -479,4 +479,4 @@ public class RoutingIteratorTests extends ESAllocationTestCase { assertTrue(routing.primary()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java index 697365151f7..e5d27b872fb 100644 --- a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.codecs; import org.apache.lucene.codecs.Codec; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -43,10 +44,14 @@ public class CodecTests extends ESSingleNodeTestCase { .endObject().endObject().string(); int i = 0; for (Version v : VersionUtils.allVersions()) { + if (v.onOrAfter(Version.V_2_0_0) == false) { + // no need to test, we don't support upgrading from these versions + continue; + } IndexService indexService = createIndex("test-" + i++, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); if (v.onOrAfter(Version.V_2_0_0_beta1)) { fail("Elasticsearch 2.0 should not support custom postings formats"); } @@ -66,10 +71,14 @@ public class CodecTests extends ESSingleNodeTestCase { .endObject().endObject().string(); int i = 0; for (Version v : VersionUtils.allVersions()) { + if (v.onOrAfter(Version.V_2_0_0) == false) { + // no need to test, we don't support upgrading from these versions + continue; + } IndexService indexService = createIndex("test-" + i++, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); if (v.onOrAfter(Version.V_2_0_0_beta1)) { fail("Elasticsearch 2.0 should not support custom postings formats"); } diff --git a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java index fa4ce357a52..bb9d23db1cb 100644 --- a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java +++ b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java @@ -19,12 +19,12 @@ package org.elasticsearch.common.breaker; 
+import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESTestCase; import java.util.concurrent.atomic.AtomicBoolean; @@ -87,7 +87,7 @@ public class MemoryCircuitBreakerTests extends ESTestCase { final AtomicReference<Throwable> lastException = new AtomicReference<>(null); final AtomicReference<ChildMemoryCircuitBreaker> breakerRef = new AtomicReference<>(null); - final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new NodeSettingsService(Settings.EMPTY)) { + final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) { @Override public CircuitBreaker getBreaker(String name) { @@ -147,7 +147,7 @@ public class MemoryCircuitBreakerTests extends ESTestCase { final AtomicInteger parentTripped = new AtomicInteger(0); final AtomicReference<ChildMemoryCircuitBreaker> breakerRef = new AtomicReference<>(null); - final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new NodeSettingsService(Settings.EMPTY)) { + final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) { @Override public CircuitBreaker getBreaker(String name) { diff --git a/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java b/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java index 48b1e899eb3..a272b6627e4 100644 --- a/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java +++ b/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.bytes; -import java.nio.charset.StandardCharsets; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; @@ -36,6 +35,7 @@ import java.nio.channels.GatheringByteChannel; import java.nio.charset.CharacterCodingException; import java.nio.charset.CharsetDecoder; import java.nio.charset.CoderResult; +import java.nio.charset.StandardCharsets; /** * Note: this is only used by one lone test method.
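The CacheTests hunks that follow replace the old single shared CountDownLatch (which mixed the start signal with thread accounting) with a two-gate pattern: every worker blocks on a startGate until the main thread releases it, the main thread waits on an endGate instead of join()ing each worker, and interrupts are recorded instead of swallowed. A minimal, self-contained sketch of that pattern, outside the diff (the names startGate/endGate/interrupted mirror the test code; the worker body is a placeholder):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.atomic.AtomicBoolean;

    public class TwoGateHarness {
        public static void main(String[] args) throws InterruptedException {
            final int numberOfThreads = 8;
            final CountDownLatch startGate = new CountDownLatch(1);               // released once by main
            final CountDownLatch endGate = new CountDownLatch(numberOfThreads);   // counted down by each worker
            final AtomicBoolean interrupted = new AtomicBoolean();
            for (int i = 0; i < numberOfThreads; i++) {
                new Thread(() -> {
                    try {
                        try {
                            startGate.await();            // all workers begin together
                        } catch (InterruptedException e) {
                            interrupted.set(true);
                            return;
                        }
                        // concurrent work against the shared structure goes here
                    } finally {
                        endGate.countDown();              // always release the main thread
                    }
                }).start();
            }
            startGate.countDown();   // release all workers at once
            endGate.await();         // wait for every worker to finish
            assert interrupted.get() == false;
        }
    }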
diff --git a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java index 38090403668..0985bc4b88e 100644 --- a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java +++ b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java @@ -24,8 +24,18 @@ import org.junit.Before; import java.lang.management.ManagementFactory; import java.lang.management.ThreadMXBean; -import java.util.*; -import java.util.concurrent.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReferenceArray; @@ -482,35 +492,43 @@ public class CacheTests extends ESTestCase { } public void testComputeIfAbsentCallsOnce() throws InterruptedException { - int numberOfThreads = randomIntBetween(2, 200); + int numberOfThreads = randomIntBetween(2, 32); final Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder().build(); - List<Thread> threads = new ArrayList<>(); AtomicReferenceArray<Boolean> flags = new AtomicReferenceArray<Boolean>(numberOfEntries); for (int j = 0; j < numberOfEntries; j++) { flags.set(j, false); } - CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); + CountDownLatch startGate = new CountDownLatch(1); + CountDownLatch endGate = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); for (int i = 0; i < numberOfThreads; i++) { Thread thread = new Thread(() -> { - latch.countDown(); - for (int j = 0; j < numberOfEntries; j++) { + try { try { - cache.computeIfAbsent(j, key -> { - assertTrue(flags.compareAndSet(key, false, true)); - return Integer.toString(key); - }); - } catch (ExecutionException e) { - throw new RuntimeException(e); + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; } + for (int j = 0; j < numberOfEntries; j++) { + try { + cache.computeIfAbsent(j, key -> { + assertTrue(flags.compareAndSet(key, false, true)); + return Integer.toString(key); + }); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } + } finally { + endGate.countDown(); } }); - threads.add(thread); thread.start(); } - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + startGate.countDown(); + endGate.await(); + assertFalse(interrupted.get()); } public void testComputeIfAbsentThrowsExceptionIfLoaderReturnsANullValue() { @@ -548,32 +566,41 @@ public class CacheTests extends ESTestCase { } } - int numberOfThreads = randomIntBetween(2, 256); + int numberOfThreads = randomIntBetween(2, 32); final Cache<Key, Integer> cache = CacheBuilder.<Key, Integer>builder().build(); - CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); + CountDownLatch startGate = new CountDownLatch(1); CountDownLatch deadlockLatch = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); List<Thread> threads = new ArrayList<>(); for (int i = 0; i < numberOfThreads; i++) { Thread thread = new Thread(() -> { - Random random = new Random(random().nextLong()); - latch.countDown(); - for (int j = 0; j < numberOfEntries; j++) { - Key key = new Key(random.nextInt(numberOfEntries)); + try { try { - cache.computeIfAbsent(key, k -> { - if (k.key == 0) { - return 0; - } else { - Integer value = cache.get(new Key(k.key / 2)); - return value != null ? value : 0; - } - }); - } catch (ExecutionException e) { - fail(e.getMessage()); + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; } + Random random = new Random(random().nextLong()); + for (int j = 0; j < numberOfEntries; j++) { + Key key = new Key(random.nextInt(numberOfEntries)); + try { + cache.computeIfAbsent(key, k -> { + if (k.key == 0) { + return 0; + } else { + Integer value = cache.get(new Key(k.key / 2)); + return value != null ? value : 0; + } + }); + } catch (ExecutionException e) { + fail(e.getMessage()); + } + } + } finally { + // successfully avoided deadlock, release the main thread + deadlockLatch.countDown(); } - // successfully avoided deadlock, release the main thread - deadlockLatch.countDown(); }); threads.add(thread); thread.start(); @@ -604,7 +631,7 @@ public class CacheTests extends ESTestCase { }, 1, 1, TimeUnit.SECONDS); // everything is setup, release the hounds - latch.countDown(); + startGate.countDown(); // wait for either deadlock to be detected or the threads to terminate deadlockLatch.await(); @@ -616,81 +643,98 @@ public class CacheTests extends ESTestCase { } public void testCachePollution() throws InterruptedException { - int numberOfThreads = randomIntBetween(2, 200); + int numberOfThreads = randomIntBetween(2, 32); final Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder().build(); - CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); - List<Thread> threads = new ArrayList<>(); + CountDownLatch startGate = new CountDownLatch(1); + CountDownLatch endGate = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); for (int i = 0; i < numberOfThreads; i++) { Thread thread = new Thread(() -> { - latch.countDown(); - Random random = new Random(random().nextLong()); - for (int j = 0; j < numberOfEntries; j++) { - Integer key = random.nextInt(numberOfEntries); - boolean first; - boolean second; - do { - first = random.nextBoolean(); - second = random.nextBoolean(); - } while (first && second); - if (first) { - try { - cache.computeIfAbsent(key, k -> { - if (random.nextBoolean()) { - return Integer.toString(k); - } else { - throw new Exception("testCachePollution"); - } - }); - } catch (ExecutionException e) { - assertNotNull(e.getCause()); - assertThat(e.getCause(), instanceOf(Exception.class)); - assertEquals(e.getCause().getMessage(), "testCachePollution"); - } - } else if (second) { - cache.invalidate(key); - } else { - cache.get(key); + try { + try { + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; } + Random random = new Random(random().nextLong()); + for (int j = 0; j < numberOfEntries; j++) { + Integer key = random.nextInt(numberOfEntries); + boolean first; + boolean second; + do { + first = random.nextBoolean(); + second = random.nextBoolean(); + } while (first && second); + if (first) { + try { + cache.computeIfAbsent(key, k -> { + if (random.nextBoolean()) { + return Integer.toString(k); + } else { + throw new Exception("testCachePollution"); + } + }); + } catch (ExecutionException e) { + assertNotNull(e.getCause()); + assertThat(e.getCause(), instanceOf(Exception.class)); + assertEquals(e.getCause().getMessage(), "testCachePollution"); + } + } else if (second) { + cache.invalidate(key); + } else { + cache.get(key); + } + } + } finally { + endGate.countDown(); } }); - threads.add(thread); thread.start(); } - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + startGate.countDown(); + endGate.await(); + assertFalse(interrupted.get()); } // test that the cache is not corrupted under lots of concurrent modifications, even hitting the same key // here be dragons: this test did catch one subtle bug during development; do not remove lightly public void testTorture() throws InterruptedException { - int numberOfThreads = randomIntBetween(2, 200); + int numberOfThreads = randomIntBetween(2, 32); final Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder() .setMaximumWeight(1000) .weigher((k, v) -> 2) .build(); - CountDownLatch latch = new CountDownLatch(1 + numberOfThreads); - List<Thread> threads = new ArrayList<>(); + CountDownLatch startGate = new CountDownLatch(1); + CountDownLatch endGate = new CountDownLatch(numberOfThreads); + AtomicBoolean interrupted = new AtomicBoolean(); for (int i = 0; i < numberOfThreads; i++) { Thread thread = new Thread(() -> { - Random random = new Random(random().nextLong()); - latch.countDown(); - for (int j = 0; j < numberOfEntries; j++) { - Integer key = random.nextInt(numberOfEntries); - cache.put(key, Integer.toString(j)); + try { + try { + startGate.await(); + } catch (InterruptedException e) { + interrupted.set(true); + return; + } + Random random = new Random(random().nextLong()); + for (int j = 0; j < numberOfEntries; j++) { + Integer key = random.nextInt(numberOfEntries); + cache.put(key, Integer.toString(j)); + } + } finally { + endGate.countDown(); } }); - threads.add(thread); thread.start(); } - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + startGate.countDown(); + endGate.await(); + assertFalse(interrupted.get()); + cache.refresh(); assertEquals(500, cache.count()); } diff --git a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java index dcbbc1ed337..259ee109f0f 100644 --- a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java +++ b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java @@ -24,7 +24,6 @@ import java.util.List; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; /** * diff --git a/core/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java b/core/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java index 90972185e0b..f574c8d9fe5 100644 --- a/core/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java @@ -21,7 +21,12 @@ package org.elasticsearch.common.collect; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; public class IteratorsTests extends ESTestCase { public void testConcatentation() { diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java index 94666d8c252..d65137b21b9 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java @@ -26,7 +26,14 @@ import com.spatial4j.core.shape.Shape; import com.spatial4j.core.shape.ShapeCollection; import
com.spatial4j.core.shape.jts.JtsGeometry; import com.spatial4j.core.shape.jts.JtsPoint; -import com.vividsolutions.jts.geom.*; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.Geometry; +import com.vividsolutions.jts.geom.GeometryFactory; +import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.LinearRing; +import com.vividsolutions.jts.geom.MultiLineString; +import com.vividsolutions.jts.geom.Point; +import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -312,7 +319,7 @@ public class GeoJSONShapeParserTests extends ESTestCase { XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); Shape shape = ShapeBuilder.parse(parser).build(); - + ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: ccw poly crossing dateline @@ -332,7 +339,7 @@ public class GeoJSONShapeParserTests extends ESTestCase { parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); - + ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 3: cw poly not crossing dateline @@ -484,7 +491,7 @@ public class GeoJSONShapeParserTests extends ESTestCase { public void testParse_invalidPolygon() throws IOException { /** - * The following 3 test cases ensure proper error handling of invalid polygons + * The following 3 test cases ensure proper error handling of invalid polygons * per the GeoJSON specification */ // test case 1: create an invalid polygon with only 2 points diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java index 6a73717fa2f..ac439ff12e0 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java @@ -28,7 +28,6 @@ import com.spatial4j.core.shape.impl.PointImpl; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.LineString; import com.vividsolutions.jts.geom.Polygon; - import org.elasticsearch.common.geo.builders.LineStringBuilder; import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index f15a731e86e..279e31aadd4 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -23,14 +23,20 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> extends ESTestCase { @@ -47,6 +53,12 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); } } @@ -63,7 +75,7 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte /** * mutate the given shape so the returned shape is different */ - protected abstract SB mutate(SB original) throws IOException; + protected abstract SB createMutation(SB original) throws IOException; /** * Test that creates new shape from a random test shape and checks both for equality @@ -89,19 +101,21 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte /** * Test serialization and deserialization of the test shape. */ + @SuppressWarnings("unchecked") public void testSerialization() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SB testShape = createTestShapeBuilder(); - SB deserializedShape = copyShape(testShape); - assertEquals(deserializedShape, testShape); - assertEquals(deserializedShape.hashCode(), testShape.hashCode()); - assertNotSame(deserializedShape, testShape); + SB deserializedShape = (SB) copyShape(testShape); + assertEquals(testShape, deserializedShape); + assertEquals(testShape.hashCode(), deserializedShape.hashCode()); + assertNotSame(testShape, deserializedShape); } } /** * Test equality and hashCode properties */ + @SuppressWarnings("unchecked") public void testEqualsAndHashcode() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SB firstShape = createTestShapeBuilder(); @@ -110,15 +124,15 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte assertTrue("shape is not equal to self", firstShape.equals(firstShape)); assertThat("same shape's hashcode returns different values if called multiple times", firstShape.hashCode(), equalTo(firstShape.hashCode())); - assertThat("different shapes should not be equal", mutate(firstShape), not(equalTo(firstShape))); + assertThat("different shapes should not be equal", createMutation(firstShape), not(equalTo(firstShape))); - SB secondShape = copyShape(firstShape); + SB secondShape = (SB) copyShape(firstShape); assertTrue("shape is not equal to self", secondShape.equals(secondShape)); assertTrue("shape is not equal to its copy", firstShape.equals(secondShape)); assertTrue("equals is not symmetric", secondShape.equals(firstShape)); assertThat("shape copy's hashcode is different from original hashcode", secondShape.hashCode(), equalTo(firstShape.hashCode())); - SB thirdShape = copyShape(secondShape); + SB thirdShape = (SB) copyShape(secondShape); assertTrue("shape is not equal to self", thirdShape.equals(thirdShape)); assertTrue("shape is not equal to its copy", secondShape.equals(thirdShape)); assertThat("shape copy's hashcode is different from original hashcode", secondShape.hashCode(), equalTo(thirdShape.hashCode())); @@ -129,14 +143,12 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte } } - protected SB copyShape(SB original) throws IOException { + static ShapeBuilder copyShape(ShapeBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { ShapeBuilder prototype = (ShapeBuilder) namedWriteableRegistry.getPrototype(ShapeBuilder.class, original.getWriteableName()); - @SuppressWarnings("unchecked") - SB copy = (SB) prototype.readFrom(in); - return copy; + return prototype.readFrom(in); } } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java similarity index 82% rename from core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java rename to core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java index 6b102b87b2c..bd90fefc922 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java @@ -20,25 +20,24 @@ package org.elasticsearch.common.geo.builders; import com.vividsolutions.jts.geom.Coordinate; - import org.elasticsearch.common.unit.DistanceUnit; import java.io.IOException; -public class CirlceBuilderTests extends AbstractShapeBuilderTestCase<CircleBuilder> { +public class CircleBuilderTests extends AbstractShapeBuilderTestCase<CircleBuilder> { @Override protected CircleBuilder createTestShapeBuilder() { - double centerX = randomDoubleBetween(-180, 180, false); - double centerY = randomDoubleBetween(-90, 90, false); - return new CircleBuilder() - .center(new Coordinate(centerX, centerY)) - .radius(randomDoubleBetween(0.1, 10.0, false), randomFrom(DistanceUnit.values())); + return createRandomShape(); } @Override - protected CircleBuilder mutate(CircleBuilder original) throws IOException { - CircleBuilder mutation = copyShape(original); + protected CircleBuilder createMutation(CircleBuilder original) throws IOException { + return mutate(original); + } + + static CircleBuilder mutate(CircleBuilder original) throws IOException { + CircleBuilder mutation = (CircleBuilder) copyShape(original); double radius = original.radius(); DistanceUnit unit = original.unit(); @@ -55,4 +54,12 @@ public class CirlceBuilderTests extends AbstractShapeBuilderTestCase<CircleBuilder> { return mutation; } + + static CircleBuilder createRandomShape() { + double centerX = randomDoubleBetween(-180, 180, false); + double centerY = randomDoubleBetween(-90, 90, false); + return new CircleBuilder() + .center(new Coordinate(centerX, centerY)) + .radius(randomDoubleBetween(0.1, 10.0, false), randomFrom(DistanceUnit.values())); + } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java new file mode 100644 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.geo.builders; + +import org.elasticsearch.test.geo.RandomShapeGenerator; + +import java.io.IOException; + +public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase<GeometryCollectionBuilder> { + + @Override + protected GeometryCollectionBuilder createTestShapeBuilder() { + GeometryCollectionBuilder geometryCollection = new GeometryCollectionBuilder(); + int shapes = randomIntBetween(0, 8); + for (int i = 0; i < shapes; i++) { + switch (randomIntBetween(0, 7)) { + case 0: + geometryCollection.shape(PointBuilderTests.createRandomShape()); + break; + case 1: + geometryCollection.shape(CircleBuilderTests.createRandomShape()); + break; + case 2: + geometryCollection.shape(EnvelopeBuilderTests.createRandomShape()); + break; + case 3: + geometryCollection.shape(LineStringBuilderTests.createRandomShape()); + break; + case 4: +
geometryCollection.shape(MultiLineStringBuilderTests.createRandomShape()); + break; + case 5: + geometryCollection.shape(MultiPolygonBuilderTests.createRandomShape()); + break; + case 6: + geometryCollection.shape(MultiPointBuilderTests.createRandomShape()); + break; + case 7: + geometryCollection.shape(PolygonBuilderTests.createRandomShape()); + break; + } + } + return geometryCollection; + } + + @Override + protected GeometryCollectionBuilder createMutation(GeometryCollectionBuilder original) throws IOException { + return mutate(original); + } + + static GeometryCollectionBuilder mutate(GeometryCollectionBuilder original) throws IOException { + GeometryCollectionBuilder mutation = (GeometryCollectionBuilder) copyShape(original); + if (mutation.shapes.size() > 0) { + int shapePosition = randomIntBetween(0, mutation.shapes.size() - 1); + ShapeBuilder shapeToChange = mutation.shapes.get(shapePosition); + switch (shapeToChange.type()) { + case POINT: + shapeToChange = PointBuilderTests.mutate((PointBuilder) shapeToChange); + break; + case CIRCLE: + shapeToChange = CircleBuilderTests.mutate((CircleBuilder) shapeToChange); + break; + case ENVELOPE: + shapeToChange = EnvelopeBuilderTests.mutate((EnvelopeBuilder) shapeToChange); + break; + case LINESTRING: + shapeToChange = LineStringBuilderTests.mutate((LineStringBuilder) shapeToChange); + break; + case MULTILINESTRING: + shapeToChange = MultiLineStringBuilderTests.mutate((MultiLineStringBuilder) shapeToChange); + break; + case MULTIPOLYGON: + shapeToChange = MultiPolygonBuilderTests.mutate((MultiPolygonBuilder) shapeToChange); + break; + case MULTIPOINT: + shapeToChange = MultiPointBuilderTests.mutate((MultiPointBuilder) shapeToChange); + break; + case POLYGON: + shapeToChange = PolygonBuilderTests.mutate((PolygonBuilder) shapeToChange); + break; + case GEOMETRYCOLLECTION: + throw new UnsupportedOperationException("GeometryCollection should not be nested inside each other"); + } + mutation.shapes.set(shapePosition, shapeToChange); + } else { + mutation.shape(RandomShapeGenerator.createShape(getRandom())); + } + return mutation; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java new file mode 100644 index 00000000000..7c9e9a58020 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +import java.io.IOException; + +public class LineStringBuilderTests extends AbstractShapeBuilderTestCase<LineStringBuilder> { + + @Override + protected LineStringBuilder createTestShapeBuilder() { + return createRandomShape(); + } + + @Override + protected LineStringBuilder createMutation(LineStringBuilder original) throws IOException { + return mutate(original); + } + + static LineStringBuilder mutate(LineStringBuilder original) throws IOException { + LineStringBuilder mutation = (LineStringBuilder) copyShape(original); + Coordinate[] coordinates = original.coordinates(false); + Coordinate coordinate = randomFrom(coordinates); + if (randomBoolean()) { + if (coordinate.x != 0.0) { + coordinate.x = coordinate.x / 2; + } else { + coordinate.x = randomDoubleBetween(-180.0, 180.0, true); + } + } else { + if (coordinate.y != 0.0) { + coordinate.y = coordinate.y / 2; + } else { + coordinate.y = randomDoubleBetween(-90.0, 90.0, true); + } + } + return mutation.points(coordinates); + } + + static LineStringBuilder createRandomShape() { + LineStringBuilder lsb = (LineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.LINESTRING); + if (randomBoolean()) { + lsb.close(); + } + return lsb; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java new file mode 100644 index 00000000000..c2224ae6d68 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +import java.io.IOException; + +public class MultiLineStringBuilderTests extends AbstractShapeBuilderTestCase<MultiLineStringBuilder> { + + @Override + protected MultiLineStringBuilder createTestShapeBuilder() { + return createRandomShape(); + } + + @Override + protected MultiLineStringBuilder createMutation(MultiLineStringBuilder original) throws IOException { + return mutate(original); + } + + static MultiLineStringBuilder mutate(MultiLineStringBuilder original) throws IOException { + MultiLineStringBuilder mutation = (MultiLineStringBuilder) copyShape(original); + Coordinate[][] coordinates = mutation.coordinates(); + int lineToChange = randomInt(coordinates.length - 1); + for (int i = 0; i < coordinates.length; i++) { + Coordinate[] line = coordinates[i]; + if (i == lineToChange) { + Coordinate coordinate = randomFrom(line); + if (randomBoolean()) { + if (coordinate.x != 0.0) { + coordinate.x = coordinate.x / 2; + } else { + coordinate.x = randomDoubleBetween(-180.0, 180.0, true); + } + } else { + if (coordinate.y != 0.0) { + coordinate.y = coordinate.y / 2; + } else { + coordinate.y = randomDoubleBetween(-90.0, 90.0, true); + } + } + } + } + return mutation; + } + + static MultiLineStringBuilder createRandomShape() { + return (MultiLineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTILINESTRING); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java new file mode 100644 index 00000000000..fb365df0122 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +import java.io.IOException; + +public class MultiPointBuilderTests extends AbstractShapeBuilderTestCase<MultiPointBuilder> { + + @Override + protected MultiPointBuilder createTestShapeBuilder() { + return createRandomShape(); + } + + @Override + protected MultiPointBuilder createMutation(MultiPointBuilder original) throws IOException { + return mutate(original); + } + + static MultiPointBuilder mutate(MultiPointBuilder original) throws IOException { + MultiPointBuilder mutation = (MultiPointBuilder) copyShape(original); + Coordinate[] coordinates = original.coordinates(false); + Coordinate coordinate = randomFrom(coordinates); + if (randomBoolean()) { + if (coordinate.x != 0.0) { + coordinate.x = coordinate.x / 2; + } else { + coordinate.x = randomDoubleBetween(-180.0, 180.0, true); + } + } else { + if (coordinate.y != 0.0) { + coordinate.y = coordinate.y / 2; + } else { + coordinate.y = randomDoubleBetween(-90.0, 90.0, true); + } + } + return mutation.points(coordinates); + } + + static MultiPointBuilder createRandomShape() { + return (MultiPointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTIPOINT); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java new file mode 100644 index 00000000000..702114a2cb8 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.geo.builders; + +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +import java.io.IOException; + +public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase<MultiPolygonBuilder> { + + @Override + protected MultiPolygonBuilder createTestShapeBuilder() { + return createRandomShape(); + } + + @Override + protected MultiPolygonBuilder createMutation(MultiPolygonBuilder original) throws IOException { + return mutate(original); + } + + static MultiPolygonBuilder mutate(MultiPolygonBuilder original) throws IOException { + MultiPolygonBuilder mutation; + if (randomBoolean()) { + mutation = new MultiPolygonBuilder(original.orientation() == Orientation.LEFT ? Orientation.RIGHT : Orientation.LEFT); + for (PolygonBuilder pb : original.polygons()) { + mutation.polygon((PolygonBuilder) copyShape(pb)); + } + } else { + mutation = (MultiPolygonBuilder) copyShape(original); + if (mutation.polygons().size() > 0) { + int polyToChange = randomInt(mutation.polygons().size() - 1); + mutation.polygons().set(polyToChange, PolygonBuilderTests.mutatePolygonBuilder(mutation.polygons().get(polyToChange))); + } else { + mutation.polygon((PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON)); + } + } + return mutation; + } + + static MultiPolygonBuilder createRandomShape() { + MultiPolygonBuilder mpb = new MultiPolygonBuilder(randomFrom(Orientation.values())); + int polys = randomIntBetween(0, 10); + for (int i = 0; i < polys; i++) { + PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON); + mpb.polygon(pgb); + } + return mpb; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java index 1e94a1bab3a..ba3f808d24a 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java @@ -20,19 +20,30 @@ package org.elasticsearch.common.geo.builders; import com.vividsolutions.jts.geom.Coordinate; - import org.elasticsearch.test.geo.RandomShapeGenerator; import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; +import java.io.IOException; + public class PointBuilderTests extends AbstractShapeBuilderTestCase<PointBuilder> { @Override protected PointBuilder createTestShapeBuilder() { - return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT); + return createRandomShape(); } @Override - protected PointBuilder mutate(PointBuilder original) { - return new PointBuilder().coordinate(new Coordinate(original.longitude()/2, original.latitude()/2)); + protected PointBuilder createMutation(PointBuilder original) throws IOException { + return mutate(original); } + + static PointBuilder mutate(PointBuilder original) { + return new PointBuilder().coordinate(new Coordinate(original.longitude() / 2, original.latitude() / 2)); + } + + static PointBuilder createRandomShape() { + return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT); + } + + } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java new file mode 100644 index 00000000000..ea83359c1f0 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied.
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +import java.io.IOException; + +public class PolygonBuilderTests extends AbstractShapeBuilderTestCase<PolygonBuilder> { + + @Override + protected PolygonBuilder createTestShapeBuilder() { + return createRandomShape(); + } + + @Override + protected PolygonBuilder createMutation(PolygonBuilder original) throws IOException { + return mutate(original); + } + + static PolygonBuilder mutate(PolygonBuilder original) throws IOException { + PolygonBuilder mutation = (PolygonBuilder) copyShape(original); + return mutatePolygonBuilder(mutation); + } + + static PolygonBuilder mutatePolygonBuilder(PolygonBuilder pb) { + if (randomBoolean()) { + pb = polyWithOpposingOrientation(pb); + } else { + // change either point in shell or in random hole + LineStringBuilder lineToChange; + if (randomBoolean() || pb.holes().size() == 0) { + lineToChange = pb.shell(); + } else { + lineToChange = randomFrom(pb.holes()); + } + Coordinate coordinate = randomFrom(lineToChange.coordinates(false)); + if (randomBoolean()) { + if (coordinate.x != 0.0) { + coordinate.x = coordinate.x / 2; + } else { + coordinate.x = randomDoubleBetween(-180.0, 180.0, true); + } + } else { + if (coordinate.y != 0.0) { + coordinate.y = coordinate.y / 2; + } else { + coordinate.y = randomDoubleBetween(-90.0, 90.0, true); + } + } + } + return pb; + } + + /** + * Takes an input polygon and returns an identical one, only with opposing orientation setting. + * This is done so we don't have to expose a setter for orientation in the actual class + */ + private static PolygonBuilder polyWithOpposingOrientation(PolygonBuilder pb) { + PolygonBuilder mutation = new PolygonBuilder(pb.orientation() == Orientation.LEFT ? Orientation.RIGHT : Orientation.LEFT); + mutation.points(pb.shell().coordinates(false)); + for (LineStringBuilder hole : pb.holes()) { + mutation.hole(hole); + } + return mutation; + } + + static PolygonBuilder createRandomShape() { + PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON); + if (randomBoolean()) { + pgb = polyWithOpposingOrientation(pgb); + } + return pgb; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java b/core/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java index a4f35389bd3..0b23002a890 100644 --- a/core/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java +++ b/core/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.hppc; import com.carrotsearch.hppc.ObjectHashSet; - import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java index 7901694bd4b..8ba500333e1 100644 --- a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java @@ -18,7 +18,13 @@ */ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.spi.*; +import org.elasticsearch.common.inject.spi.Element; +import org.elasticsearch.common.inject.spi.Elements; +import org.elasticsearch.common.inject.spi.InstanceBinding; +import org.elasticsearch.common.inject.spi.LinkedKeyBinding; +import org.elasticsearch.common.inject.spi.ProviderInstanceBinding; +import org.elasticsearch.common.inject.spi.ProviderLookup; +import org.elasticsearch.common.inject.spi.UntargettedBinding; import org.elasticsearch.test.ESTestCase; import java.lang.annotation.Annotation; @@ -60,6 +66,24 @@ public abstract class ModuleTestCase extends ESTestCase { fail("Did not find any binding to " + to.getName() + ". Found these bindings:\n" + s); } + /** Configures the module and asserts "clazz" is not bound to anything.
*/ + public void assertNotBound(Module module, Class clazz) { + List<Element> elements = Elements.getElements(module); + for (Element element : elements) { + if (element instanceof LinkedKeyBinding) { + LinkedKeyBinding binding = (LinkedKeyBinding) element; + if (clazz.equals(binding.getKey().getTypeLiteral().getType())) { + fail("Found binding for " + clazz.getName() + " to " + binding.getKey().getTypeLiteral().getType().getTypeName()); + } + } else if (element instanceof UntargettedBinding) { + UntargettedBinding binding = (UntargettedBinding) element; + if (clazz.equals(binding.getKey().getTypeLiteral().getType())) { + fail("Found binding for " + clazz.getName()); + } + } + } + } + /** * Attempts to configure the module, and asserts an {@link IllegalArgumentException} is * caught, containing the given messages diff --git a/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java b/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java index 39d24a0e792..4f2b8f6811c 100644 --- a/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.io; import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; import org.elasticsearch.test.ESTestCase; -import org.junit.Assert; import org.junit.Before; import java.io.IOException; @@ -31,8 +30,8 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; import static org.elasticsearch.common.io.FileTestUtils.assertFileContent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; /** * Unit tests for {@link org.elasticsearch.common.io.FileSystemUtils}.
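The assertNotBound helper added to ModuleTestCase above walks the module's binding Elements and fails if the given class appears as the key of a linked or untargetted binding. A hypothetical usage sketch, not part of this commit (MyService and the lambda module are illustrative only):

    // inside a test class that extends ModuleTestCase
    interface MyService {}   // illustrative service interface

    public void testMyServiceNotBound() {
        Module module = binder -> {
            // configure other bindings here; deliberately nothing for MyService
        };
        assertNotBound(module, MyService.class);   // passes: no binding exists for MyService
    }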
diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java index cec70fb61f5..b5f26dba8a5 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java @@ -25,7 +25,12 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.nio.ByteBuffer; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; public class StreamTests extends ESTestCase { public void testRandomVLongSerialization() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java index 5511796a2ed..a4a5972e45b 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java @@ -34,7 +34,6 @@ import org.apache.lucene.search.QueryUtils; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.Version; import org.elasticsearch.test.ESTestCase; import java.io.IOException; diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 17345fd714f..0a15693dfd5 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -24,7 +24,14 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NoDeletionPolicy; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; @@ -35,7 +42,11 @@ import org.apache.lucene.util.Version; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java index f4f3034528f..7ee238ae7f2 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java @@ -220,6 +220,41 @@ public class SimpleAllTests extends ESTestCase { indexWriter.close(); } + public void testTermMissingFromOneSegment() throws Exception { + Directory dir = new RAMDirectory(); + IndexWriter indexWriter = new IndexWriter(dir, new 
IndexWriterConfig(Lucene.STANDARD_ANALYZER)); + + Document doc = new Document(); + doc.add(new Field("_id", "1", StoredField.TYPE)); + AllEntries allEntries = new AllEntries(); + allEntries.addText("field", "something", 2.0f); + allEntries.reset(); + doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER))); + + indexWriter.addDocument(doc); + indexWriter.commit(); + + doc = new Document(); + doc.add(new Field("_id", "2", StoredField.TYPE)); + allEntries = new AllEntries(); + allEntries.addText("field", "else", 1.0f); + allEntries.reset(); + doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER))); + + indexWriter.addDocument(doc); + + IndexReader reader = DirectoryReader.open(indexWriter, true); + assertEquals(2, reader.leaves().size()); + IndexSearcher searcher = new IndexSearcher(reader); + + // "something" only appears in the first segment: + Query query = new AllTermQuery(new Term("_all", "something")); + TopDocs docs = searcher.search(query, 10); + assertEquals(1, docs.totalHits); + + indexWriter.close(); + } + public void testMultipleTokensAllNoBoost() throws Exception { Directory dir = new RAMDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index ad811a38aed..51d2ba77ec5 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -24,7 +24,13 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -36,7 +42,14 @@ import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java new file mode 100644 index 00000000000..de7a32b2357 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -0,0 +1,173 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.lucene.search.function; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +public class MinScoreScorerTests extends LuceneTestCase { + + private static DocIdSetIterator iterator(final int... docs) { + return new DocIdSetIterator() { + + int i = -1; + + @Override + public int nextDoc() throws IOException { + if (i + 1 == docs.length) { + return NO_MORE_DOCS; + } else { + return docs[++i]; + } + } + + @Override + public int docID() { + return i < 0 ? -1 : i == docs.length ? NO_MORE_DOCS : docs[i]; + } + + @Override + public long cost() { + return docs.length; + } + + @Override + public int advance(int target) throws IOException { + return slowAdvance(target); + } + }; + } + + private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) { + final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs); + return new Scorer(null) { + public DocIdSetIterator iterator() { + if (twoPhase) { + return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator()); + } else { + return iterator; + } + } + + public TwoPhaseIterator twoPhaseIterator() { + if (twoPhase) { + return new TwoPhaseIterator(iterator) { + + @Override + public boolean matches() throws IOException { + return Arrays.binarySearch(docs, iterator.docID()) >= 0; + } + + @Override + public float matchCost() { + return 10; + } + }; + } else { + return null; + } + } + + @Override + public int docID() { + return iterator.docID(); + } + + @Override + public float score() throws IOException { + final int idx = Arrays.binarySearch(docs, docID()); + return scores[idx]; + } + + @Override + public int freq() throws IOException { + return 1; + } + }; + } + + public void doTestRandom(boolean twoPhase) throws IOException { + final int maxDoc = TestUtil.nextInt(random(), 10, 10000); + final int numDocs = TestUtil.nextInt(random(), 1, maxDoc / 2); + final Set<Integer> uniqueDocs = new HashSet<>(); + while (uniqueDocs.size() < numDocs) { + uniqueDocs.add(random().nextInt(maxDoc)); + } + final int[] docs = new int[numDocs]; + int i = 0; + for (int doc : uniqueDocs) { + docs[i++] = doc; + } + Arrays.sort(docs); + final float[] scores = new float[numDocs]; + for (i = 0; i < numDocs; ++i) { + scores[i] = random().nextFloat(); + } + Scorer scorer = scorer(maxDoc, docs, scores, twoPhase); + final float minScore = random().nextFloat(); + Scorer minScoreScorer = new MinScoreScorer(null, scorer, minScore); + int doc = -1; + while (doc != DocIdSetIterator.NO_MORE_DOCS) { + final int target; + if (random().nextBoolean()) { + target = doc + 1; + doc = minScoreScorer.iterator().nextDoc(); + }
else { + target = doc + TestUtil.nextInt(random(), 1, 10); + doc = minScoreScorer.iterator().advance(target); + } + int idx = Arrays.binarySearch(docs, target); + if (idx < 0) { + idx = -1 - idx; + } + while (idx < docs.length && scores[idx] < minScore) { + idx += 1; + } + if (idx == docs.length) { + assertEquals(DocIdSetIterator.NO_MORE_DOCS, doc); + } else { + assertEquals(docs[idx], doc); + assertEquals(scores[idx], scorer.score(), 0f); + } + } + } + + public void testRegularIterator() throws IOException { + final int iters = atLeast(5); + for (int iter = 0; iter < iters; ++iter) { + doTestRandom(false); + } + } + + public void testTwoPhaseIterator() throws IOException { + final int iters = atLeast(5); + for (int iter = 0; iter < iters; ++iter) { + doTestRandom(true); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java b/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java index ef8c55ddf90..0b00353f98a 100644 --- a/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/CidrsTests.java @@ -20,13 +20,13 @@ package org.elasticsearch.common.network; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.network.Cidrs; -import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeBuilder; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.List; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; public class CidrsTests extends ESTestCase { public void testNullCidr() { @@ -133,8 +133,9 @@ public class CidrsTests extends ESTestCase { public void testValidCombinations() { for (long i = 0; i < (1 << 16); i++) { + String octetsString = Cidrs.octetsToString(Cidrs.longToOctets(i << 16)); for (int mask = 16; mask <= 32; mask++) { - String test = Cidrs.octetsToCIDR(Cidrs.longToOctets(i << 16), mask); + String test = octetsString + "/" + mask; long[] actual = Cidrs.cidrMaskToMinMax(test); assertNotNull(test, actual); assertEquals(test, 2, actual.length); diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java new file mode 100644 index 00000000000..798e82a979e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -0,0 +1,176 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
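MinScoreScorer itself is not part of this hunk, so as a hedged sketch of the mechanism the new test randomizes (the class name MinScoreTwoPhase is illustrative; every API call below also appears in the test above): a minimum-score filter can be phrased as a Lucene TwoPhaseIterator whose approximation is the wrapped scorer's iterator and whose matches() confirms the score lazily.

---------------------------------------------------------------------------
import java.io.IOException;

import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;

final class MinScoreTwoPhase extends TwoPhaseIterator {
    private final Scorer in;
    private final float minScore;

    MinScoreTwoPhase(Scorer in, float minScore) {
        super(in.iterator());          // approximation: every doc the inner scorer matches
        this.in = in;
        this.minScore = minScore;
    }

    @Override
    public boolean matches() throws IOException {
        return in.score() >= minScore; // confirm only when a candidate is inspected
    }

    @Override
    public float matchCost() {
        return 1000f;                  // scoring can be arbitrarily expensive
    }
}
---------------------------------------------------------------------------

Phrasing the check as a two-phase iterator lets conjunctions advance the cheap approximation first and pay for score() only on surviving candidates, which is exactly the twoPhase=true path the test randomizes against the plain-iterator path.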
+ */ + +package org.elasticsearch.common.network; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Table; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.http.HttpServerAdapter; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.HttpStats; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.cat.AbstractCatAction; +import org.elasticsearch.rest.action.cat.RestNodesAction; +import org.elasticsearch.rest.action.main.RestMainAction; +import org.elasticsearch.test.transport.AssertingLocalTransport; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; + +public class NetworkModuleTests extends ModuleTestCase { + + static class FakeTransportService extends TransportService { + public FakeTransportService() { + super(null, null); + } + } + + static class FakeTransport extends AssertingLocalTransport { + public FakeTransport() { + super(null, null, null, null); + } + } + + static class FakeHttpTransport extends AbstractLifecycleComponent implements HttpServerTransport { + public FakeHttpTransport() { + super(null); + } + @Override + protected void doStart() {} + @Override + protected void doStop() {} + @Override + protected void doClose() {} + @Override + public BoundTransportAddress boundAddress() { + return null; + } + @Override + public HttpInfo info() { + return null; + } + @Override + public HttpStats stats() { + return null; + } + @Override + public void httpServerAdapter(HttpServerAdapter httpServerAdapter) {} + } + + static class FakeRestHandler extends BaseRestHandler { + public FakeRestHandler() { + super(null, null, null); + } + @Override + protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {} + } + + static class FakeCatRestHandler extends AbstractCatAction { + public FakeCatRestHandler() { + super(null, null, null); + } + @Override + protected void doRequest(RestRequest request, RestChannel channel, Client client) {} + @Override + protected void documentation(StringBuilder sb) {} + @Override + protected Table getTableWithHeader(RestRequest request) { + return null; + } + } + + public void testRegisterTransportService() { + Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "custom").build(); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerTransportService("custom", FakeTransportService.class); + assertBinding(module, TransportService.class, FakeTransportService.class); + + // check it works with transport only as well + module = new NetworkModule(new NetworkService(settings), settings, true); + module.registerTransportService("custom", FakeTransportService.class); + assertBinding(module, TransportService.class, FakeTransportService.class); + } + + public void testRegisterTransport() { + Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom").build(); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerTransport("custom", 
FakeTransport.class); + assertBinding(module, Transport.class, FakeTransport.class); + + // check it works with transport only as well + module = new NetworkModule(new NetworkService(settings), settings, true); + module.registerTransport("custom", FakeTransport.class); + assertBinding(module, Transport.class, FakeTransport.class); + } + + public void testRegisterHttpTransport() { + Settings settings = Settings.builder().put(NetworkModule.HTTP_TYPE_KEY, "custom").build(); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerHttpTransport("custom", FakeHttpTransport.class); + assertBinding(module, HttpServerTransport.class, FakeHttpTransport.class); + + // check registration not allowed for transport only + module = new NetworkModule(new NetworkService(settings), settings, true); + try { + module.registerHttpTransport("custom", FakeHttpTransport.class); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Cannot register http transport")); + assertTrue(e.getMessage().contains("for transport client")); + } + + // not added if http is disabled + settings = Settings.builder().put(NetworkModule.HTTP_ENABLED, false).build(); + module = new NetworkModule(new NetworkService(settings), settings, false); + assertNotBound(module, HttpServerTransport.class); + } + + public void testRegisterRestHandler() { + Settings settings = Settings.EMPTY; + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerRestHandler(FakeRestHandler.class); + // also check a builtin is bound + assertSetMultiBinding(module, RestHandler.class, FakeRestHandler.class, RestMainAction.class); + + // check registration not allowed for transport only + module = new NetworkModule(new NetworkService(settings), settings, true); + try { + module.registerRestHandler(FakeRestHandler.class); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Cannot register rest handler")); + assertTrue(e.getMessage().contains("for transport client")); + } + } + + public void testRegisterCatRestHandler() { + Settings settings = Settings.EMPTY; + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + module.registerRestHandler(FakeCatRestHandler.class); + // also check a builtin is bound + assertSetMultiBinding(module, AbstractCatAction.class, FakeCatRestHandler.class, RestNodesAction.class); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java new file mode 100644 index 00000000000..97393c51b8d --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -0,0 +1,168 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
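The four registration tests above all verify the same plugin-facing flow. A minimal sketch (the name "custom" is arbitrary, and FakeTransport is the stub defined in NetworkModuleTests above):

---------------------------------------------------------------------------
package org.elasticsearch.common.network;

import org.elasticsearch.common.settings.Settings;

public class RegisterTransportSketch {
    static NetworkModule wire() {
        // Select the implementation by name ...
        Settings settings = Settings.builder()
                .put(NetworkModule.TRANSPORT_TYPE_KEY, "custom")
                .build();
        NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false);
        // ... then register a class under that name; Transport -> FakeTransport
        // is bound when the injector is created.
        module.registerTransport("custom", NetworkModuleTests.FakeTransport.class);
        return module;
    }
}
---------------------------------------------------------------------------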
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +public class ScopedSettingsTests extends ESTestCase { + + public void testAddConsumer() { + Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.CLUSTER); + AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, Collections.singleton(testSetting)); + + AtomicInteger consumer = new AtomicInteger(); + service.addSettingsUpdateConsumer(testSetting, consumer::set); + AtomicInteger consumer2 = new AtomicInteger(); + try { + service.addSettingsUpdateConsumer(testSetting2, consumer2::set); + fail("setting not registered"); + } catch (IllegalArgumentException ex) { + assertEquals("Setting is not registered for key [foo.bar.baz]", ex.getMessage()); + } + + try { + service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {consumer.set(a); consumer2.set(b);}); + fail("setting not registered"); + } catch (IllegalArgumentException ex) { + assertEquals("Setting is not registered for key [foo.bar.baz]", ex.getMessage()); + } + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + service.applySettings(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); + assertEquals(2, consumer.get()); + assertEquals(0, consumer2.get()); + } + + public void testApply() { + Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.CLUSTER); + AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(testSetting, testSetting2))); + + AtomicInteger consumer = new AtomicInteger(); + service.addSettingsUpdateConsumer(testSetting, consumer::set); + AtomicInteger consumer2 = new AtomicInteger(); + service.addSettingsUpdateConsumer(testSetting2, consumer2::set, (s) -> assertTrue(s > 0)); + + AtomicInteger aC = new AtomicInteger(); + AtomicInteger bC = new AtomicInteger(); + service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {aC.set(a); bC.set(b);}); + + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + assertEquals(0, aC.get()); + assertEquals(0, bC.get()); + try { + service.applySettings(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build()); + fail("invalid value"); + } catch (IllegalArgumentException ex) { + assertEquals("illegal value can't update [foo.bar.baz] from [1] to [-15]", ex.getMessage()); + } + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + assertEquals(0, aC.get()); + assertEquals(0, bC.get()); + try { + service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build()); + fail("invalid value"); + } catch (IllegalArgumentException ex) { + assertEquals("illegal value can't update [foo.bar.baz] from [1] to [-15]", ex.getMessage()); + } 
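// A sketch of the dryRun contract being pinned down here, assuming the same
// service instance as above: dryRun runs the full validation pipeline (and throws
// the same IllegalArgumentException on a bad value) but must notify no consumers,
// whereas applySettings validates and then publishes, e.g.
//
//     service.dryRun(update);        // may throw; consumer values stay untouched
//     service.applySettings(update); // same validation, then consumers observe the update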
+ + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + assertEquals(0, aC.get()); + assertEquals(0, bC.get()); + service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); + assertEquals(0, consumer.get()); + assertEquals(0, consumer2.get()); + assertEquals(0, aC.get()); + assertEquals(0, bC.get()); + + service.applySettings(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); + assertEquals(2, consumer.get()); + assertEquals(15, consumer2.get()); + assertEquals(2, aC.get()); + assertEquals(15, bC.get()); + } + + public void testGet() { + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + + // group setting - complex matcher + Setting setting = settings.get("cluster.routing.allocation.require.value"); + assertEquals(setting, FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING); + + setting = settings.get("cluster.routing.allocation.total_shards_per_node"); + assertEquals(setting, ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING); + + // array settings - complex matcher + assertNotNull(settings.get("transport.tracer.include." + randomIntBetween(1, 100))); + assertSame(TransportService.TRACE_LOG_INCLUDE_SETTING, settings.get("transport.tracer.include." + randomIntBetween(1, 100))); + + // array settings - complex matcher - only accepts numbers + assertNull(settings.get("transport.tracer.include.FOO")); + } + + public void testIsDynamic(){ + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER), Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.CLUSTER)))); + assertFalse(settings.hasDynamicSetting("foo.bar.baz")); + assertTrue(settings.hasDynamicSetting("foo.bar")); + assertNotNull(settings.get("foo.bar.baz")); + settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + assertTrue(settings.hasDynamicSetting("transport.tracer.include." 
+ randomIntBetween(1, 100))); + assertFalse(settings.hasDynamicSetting("transport.tracer.include.BOOM")); + assertTrue(settings.hasDynamicSetting("cluster.routing.allocation.require.value")); + } + + public void testDiff() throws IOException { + Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.CLUSTER); + Setting foobar = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(foobar, foobarbaz))); + Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); + assertEquals(diff.getAsMap().size(), 1); + assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(1)); + + diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.builder().put("foo.bar.baz", 17).build()); + assertEquals(diff.getAsMap().size(), 1); + assertEquals(diff.getAsInt("foo.bar.baz", null), Integer.valueOf(17)); + } + + public void testUpdateTracer() { + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + AtomicReference> ref = new AtomicReference<>(); + settings.addSettingsUpdateConsumer(TransportService.TRACE_LOG_INCLUDE_SETTING, ref::set); + settings.applySettings(Settings.builder().putArray("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build()); + assertNotNull(ref.get().size()); + assertEquals(ref.get().size(), 2); + assertTrue(ref.get().contains("internal:index/shard/recovery/*")); + assertTrue(ref.get().contains("internal:gateway/local*")); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java new file mode 100644 index 00000000000..71914444725 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -0,0 +1,322 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
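Taken together, the tests above pin down a register-then-subscribe contract. A minimal sketch (the key "my.shards" is hypothetical; all calls appear in the tests above):

---------------------------------------------------------------------------
import java.util.Collections;
import java.util.concurrent.atomic.AtomicInteger;

import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class DynamicSettingSketch {
    static void run() {
        // The setting must be part of the ClusterSettings universe, otherwise
        // addSettingsUpdateConsumer rejects it ("Setting is not registered for key ...").
        Setting<Integer> shards = Setting.intSetting("my.shards", 1, true, Setting.Scope.CLUSTER);
        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, Collections.singleton(shards));

        AtomicInteger current = new AtomicInteger(shards.get(Settings.EMPTY));
        clusterSettings.addSettingsUpdateConsumer(shards, current::set);

        // applySettings validates first; consumers only ever see accepted values.
        clusterSettings.applySettings(Settings.builder().put("my.shards", 4).build());
        assert current.get() == 4;
    }
}
---------------------------------------------------------------------------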
+ */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + +public class SettingTests extends ESTestCase { + + + public void testGet() { + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); + assertFalse(booleanSetting.get(Settings.EMPTY)); + assertFalse(booleanSetting.get(Settings.builder().put("foo.bar", false).build())); + assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", true).build())); + } + + public void testByteSize() { + Setting byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), true, Setting.Scope.CLUSTER); + assertFalse(byteSizeValueSetting.isGroupSetting()); + ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); + assertEquals(byteSizeValue.bytes(), 1024); + AtomicReference value = new AtomicReference<>(null); + ClusterSettings.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger); + try { + settingUpdater.apply(Settings.builder().put("a.byte.size", 12).build(), Settings.EMPTY); + fail("no unit"); + } catch (IllegalArgumentException ex) { + assertEquals("failed to parse setting [a.byte.size] with value [12] as a size in bytes: unit is missing or unrecognized", ex.getMessage()); + } + + assertTrue(settingUpdater.apply(Settings.builder().put("a.byte.size", "12b").build(), Settings.EMPTY)); + assertEquals(new ByteSizeValue(12), value.get()); + } + + public void testSimpleUpdate() { + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); + AtomicReference atomicBoolean = new AtomicReference<>(null); + ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); + Settings build = Settings.builder().put("foo.bar", false).build(); + settingUpdater.apply(build, Settings.EMPTY); + assertNull(atomicBoolean.get()); + build = Settings.builder().put("foo.bar", true).build(); + settingUpdater.apply(build, Settings.EMPTY); + assertTrue(atomicBoolean.get()); + + // try update bogus value + build = Settings.builder().put("foo.bar", "I am not a boolean").build(); + try { + settingUpdater.apply(build, Settings.EMPTY); + fail("not a boolean"); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [I am not a boolean] cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ]", ex.getMessage()); + } + } + + public void testUpdateNotDynamic() { + Setting booleanSetting = Setting.boolSetting("foo.bar", false, false, Setting.Scope.CLUSTER); + assertFalse(booleanSetting.isGroupSetting()); + AtomicReference atomicBoolean = new AtomicReference<>(null); + try { + booleanSetting.newUpdater(atomicBoolean::set, logger); + fail("not dynamic"); + } catch (IllegalStateException ex) { + assertEquals("setting [foo.bar] is not dynamic", ex.getMessage()); + } + } + + public void testUpdaterIsIsolated() { + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); + AtomicReference ab1 = new AtomicReference<>(null); + AtomicReference ab2 = new AtomicReference<>(null); + ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger); + ClusterSettings.SettingUpdater 
settingUpdater2 = booleanSetting.newUpdater(ab2::set, logger); + settingUpdater.apply(Settings.builder().put("foo.bar", true).build(), Settings.EMPTY); + assertTrue(ab1.get()); + assertNull(ab2.get()); + } + + public void testDefault() { + TimeValue defautlValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000)); + Setting setting = Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), Setting.Scope.CLUSTER); + assertFalse(setting.isGroupSetting()); + String aDefault = setting.getDefault(Settings.EMPTY); + assertEquals(defautlValue.millis() + "ms", aDefault); + assertEquals(defautlValue.millis(), setting.get(Settings.EMPTY).millis()); + + Setting secondaryDefault = new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.CLUSTER); + assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); + assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); + } + + public void testComplexType() { + AtomicReference ref = new AtomicReference<>(null); + Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), true, Setting.Scope.CLUSTER); + assertFalse(setting.isGroupSetting()); + ref.set(setting.get(Settings.EMPTY)); + ComplexType type = ref.get(); + ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); + assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); + assertSame("no update - type has not changed", type, ref.get()); + + // change from default + assertTrue(settingUpdater.apply(Settings.builder().put("foo.bar", "2").build(), Settings.EMPTY)); + assertNotSame("update - type has changed", type, ref.get()); + assertEquals("2", ref.get().foo); + + + // change back to default... 
+ assertTrue(settingUpdater.apply(Settings.EMPTY, Settings.builder().put("foo.bar", "2").build())); + assertNotSame("update - type has changed", type, ref.get()); + assertEquals("", ref.get().foo); + } + + public void testType() { + Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.CLUSTER); + assertEquals(integerSetting.getScope(), Setting.Scope.CLUSTER); + integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.INDEX); + assertEquals(integerSetting.getScope(), Setting.Scope.INDEX); + } + + public void testGroups() { + AtomicReference ref = new AtomicReference<>(null); + Setting setting = Setting.groupSetting("foo.bar.", true, Setting.Scope.CLUSTER); + assertTrue(setting.isGroupSetting()); + ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); + + Settings currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build(); + Settings previousInput = Settings.EMPTY; + assertTrue(settingUpdater.apply(currentInput, previousInput)); + assertNotNull(ref.get()); + Settings settings = ref.get(); + Map asMap = settings.getAsGroups(); + assertEquals(3, asMap.size()); + assertEquals(asMap.get("1").get("value"), "1"); + assertEquals(asMap.get("2").get("value"), "2"); + assertEquals(asMap.get("3").get("value"), "3"); + + previousInput = currentInput; + currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build(); + Settings current = ref.get(); + assertFalse(settingUpdater.apply(currentInput, previousInput)); + assertSame(current, ref.get()); + + previousInput = currentInput; + currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build(); + // now update and check that we got it + assertTrue(settingUpdater.apply(currentInput, previousInput)); + assertNotSame(current, ref.get()); + + asMap = ref.get().getAsGroups(); + assertEquals(2, asMap.size()); + assertEquals(asMap.get("1").get("value"), "1"); + assertEquals(asMap.get("2").get("value"), "2"); + + previousInput = currentInput; + currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "4").build(); + // now update and check that we got it + assertTrue(settingUpdater.apply(currentInput, previousInput)); + assertNotSame(current, ref.get()); + + asMap = ref.get().getAsGroups(); + assertEquals(2, asMap.size()); + assertEquals(asMap.get("1").get("value"), "1"); + assertEquals(asMap.get("2").get("value"), "4"); + + assertTrue(setting.match("foo.bar.baz")); + assertFalse(setting.match("foo.baz.bar")); + + ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger,(s) -> assertFalse(true)); + try { + predicateSettingUpdater.apply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build(), Settings.EMPTY); + fail("not accepted"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "illegal value can't update [foo.bar.] 
from [{}] to [{1.value=1, 2.value=2}]"); + } + } + + public static class ComplexType { + + final String foo; + + public ComplexType(String foo) { + this.foo = foo; + } + } + + public static class Composite { + + private Integer b; + private Integer a; + + public void set(Integer a, Integer b) { + this.a = a; + this.b = b; + } + } + + + public void testComposite() { + Composite c = new Composite(); + Setting a = Setting.intSetting("foo.int.bar.a", 1, true, Setting.Scope.CLUSTER); + Setting b = Setting.intSetting("foo.int.bar.b", 1, true, Setting.Scope.CLUSTER); + ClusterSettings.SettingUpdater> settingUpdater = Setting.compoundUpdater(c::set, a, b, logger); + assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); + assertNull(c.a); + assertNull(c.b); + + Settings build = Settings.builder().put("foo.int.bar.a", 2).build(); + assertTrue(settingUpdater.apply(build, Settings.EMPTY)); + assertEquals(2, c.a.intValue()); + assertEquals(1, c.b.intValue()); + + Integer aValue = c.a; + assertFalse(settingUpdater.apply(build, build)); + assertSame(aValue, c.a); + Settings previous = build; + build = Settings.builder().put("foo.int.bar.a", 2).put("foo.int.bar.b", 5).build(); + assertTrue(settingUpdater.apply(build, previous)); + assertEquals(2, c.a.intValue()); + assertEquals(5, c.b.intValue()); + + // reset to default + assertTrue(settingUpdater.apply(Settings.EMPTY, build)); + assertEquals(1, c.a.intValue()); + assertEquals(1, c.b.intValue()); + + } + + public void testListSettings() { + Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, Setting.Scope.CLUSTER); + List value = listSetting.get(Settings.EMPTY); + assertEquals(1, value.size()); + assertEquals("foo,bar", value.get(0)); + + List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); + Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); + value = listSetting.get(builder.build()); + assertEquals(input.size(), value.size()); + assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0])); + + // try to parse this really annoying format + builder = Settings.builder(); + for (int i = 0; i < input.size(); i++) { + builder.put("foo.bar." 
+ i, input.get(i)); + } + value = listSetting.get(builder.build()); + assertEquals(input.size(), value.size()); + assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0])); + + AtomicReference> ref = new AtomicReference<>(); + AbstractScopedSettings.SettingUpdater settingUpdater = listSetting.newUpdater(ref::set, logger); + assertTrue(settingUpdater.hasChanged(builder.build(), Settings.EMPTY)); + settingUpdater.apply(builder.build(), Settings.EMPTY); + assertEquals(input.size(), ref.get().size()); + assertArrayEquals(ref.get().toArray(new String[0]), input.toArray(new String[0])); + + settingUpdater.apply(Settings.builder().putArray("foo.bar", "123").build(), builder.build()); + assertEquals(1, ref.get().size()); + assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"123"}); + + settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putArray("foo.bar", "123").build()); + assertEquals(3, ref.get().size()); + assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"1", "2", "3"}); + + settingUpdater.apply(Settings.EMPTY, Settings.builder().put("foo.bar", "1,2,3").build()); + assertEquals(1, ref.get().size()); + assertEquals("foo,bar", ref.get().get(0)); + + Setting> otherSettings = Setting.listSetting("foo.bar", Collections.emptyList(), Integer::parseInt, true, Setting.Scope.CLUSTER); + List defaultValue = otherSettings.get(Settings.EMPTY); + assertEquals(0, defaultValue.size()); + List intValues = otherSettings.get(Settings.builder().put("foo.bar", "0,1,2,3").build()); + assertEquals(4, intValues.size()); + for (int i = 0; i < intValues.size(); i++) { + assertEquals(i, intValues.get(i).intValue()); + } + } + + public void testListSettingAcceptsNumberSyntax() { + Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, Setting.Scope.CLUSTER); + List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); + Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); + // try to parse this really annoying format + for (String key : builder.internalMap().keySet()) { + assertTrue("key: " + key + " doesn't match", listSetting.match(key)); + } + builder = Settings.builder().put("foo.bar", "1,2,3"); + for (String key : builder.internalMap().keySet()) { + assertTrue("key: " + key + " doesn't match", listSetting.match(key)); + } + assertFalse(listSetting.match("foo_bar")); + assertFalse(listSetting.match("foo_bar.1")); + assertTrue(listSetting.match("foo.bar")); + assertTrue(listSetting.match("foo.bar." + randomIntBetween(0,10000))); + + } +} diff --git a/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java b/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java index 41ed95a519e..55bc2b8ddb9 100644 --- a/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java +++ b/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java @@ -28,7 +28,9 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.List; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; /** * Basic tests for the {@link BoundTransportAddress} class. 
These tests should not bind to any addresses but should diff --git a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java index ec0e26091df..2945d86fe59 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java @@ -162,4 +162,14 @@ public class TimeValueTests extends ESTestCase { assertThat(e.getMessage(), containsString("Failed to parse")); } } + + public void testToStringRep() { + assertEquals("-1", new TimeValue(-1).getStringRep()); + assertEquals("10ms", new TimeValue(10, TimeUnit.MILLISECONDS).getStringRep()); + assertEquals("1533ms", new TimeValue(1533, TimeUnit.MILLISECONDS).getStringRep()); + assertEquals("90s", new TimeValue(90, TimeUnit.SECONDS).getStringRep()); + assertEquals("90m", new TimeValue(90, TimeUnit.MINUTES).getStringRep()); + assertEquals("36h", new TimeValue(36, TimeUnit.HOURS).getStringRep()); + assertEquals("1000d", new TimeValue(1000, TimeUnit.DAYS).getStringRep()); + } } diff --git a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java index 7d36c09ee19..bf55a330509 100644 --- a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java @@ -23,11 +23,11 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; @@ -336,9 +336,9 @@ public class BigArraysTests extends ESSingleNodeTestCase { for (String type : Arrays.asList("Byte", "Int", "Long", "Float", "Double", "Object")) { HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService( Settings.builder() - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, size - 1, ByteSizeUnit.BYTES) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), size - 1, ByteSizeUnit.BYTES) .build(), - new NodeSettingsService(Settings.EMPTY)); + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); BigArrays bigArrays = new BigArrays(null, hcbs).withCircuitBreaking(); Method create = BigArrays.class.getMethod("new" + type + "Array", long.class); try { @@ -356,9 +356,9 @@ public class BigArraysTests extends ESSingleNodeTestCase { final long maxSize = randomIntBetween(1 << 10, 1 << 22); HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService( Settings.builder() - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, maxSize, ByteSizeUnit.BYTES) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES) .build(), - new NodeSettingsService(Settings.EMPTY)); + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); BigArrays bigArrays = new BigArrays(null, hcbs).withCircuitBreaking(); 
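// Aside on the TimeValueTests hunk above: the new getStringRep() keeps the unit
// the value was constructed with rather than normalizing it, which the assertions
// pin down, e.g.
//
//     new TimeValue(90, TimeUnit.SECONDS).getStringRep()        // "90s", not "1.5m"
//     new TimeValue(1533, TimeUnit.MILLISECONDS).getStringRep() // "1533ms", not "1.533s"
//     new TimeValue(-1).getStringRep()                          // "-1", sentinel preserved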
Method create = BigArrays.class.getMethod("new" + type + "Array", long.class); final int size = scaledRandomIntBetween(1, 20); diff --git a/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java b/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java index df26f2d55b8..a26a06a09a3 100644 --- a/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.util; import com.carrotsearch.hppc.ObjectLongHashMap; import com.carrotsearch.hppc.ObjectLongMap; import com.carrotsearch.hppc.cursors.ObjectLongCursor; - import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.TestUtil; diff --git a/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java b/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java index 4c3612da8e0..8c192a2a350 100644 --- a/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java @@ -25,7 +25,14 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.Counter; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.SortedSet; +import java.util.TreeSet; import static org.elasticsearch.common.util.CollectionUtils.eagerPartition; import static org.hamcrest.Matchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/common/util/ExtensionPointTests.java b/core/src/test/java/org/elasticsearch/common/util/ExtensionPointTests.java index 25564deb07e..b4bc99e9642 100644 --- a/core/src/test/java/org/elasticsearch/common/util/ExtensionPointTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/ExtensionPointTests.java @@ -20,7 +20,11 @@ package org.elasticsearch.common.util; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.Binder; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java b/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java index aa21f323185..f5ae388db77 100644 --- a/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.util; import com.carrotsearch.hppc.LongLongHashMap; import com.carrotsearch.hppc.LongLongMap; import com.carrotsearch.hppc.cursors.LongLongCursor; - import org.elasticsearch.test.ESSingleNodeTestCase; import java.util.HashMap; diff --git a/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java b/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java index 35fa7bec058..bf091828ca5 100644 --- 
a/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.util; import com.carrotsearch.hppc.LongObjectHashMap; - import org.elasticsearch.test.ESSingleNodeTestCase; public class LongObjectHashMapTests extends ESSingleNodeTestCase { diff --git a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java index 94f154d4e5d..25c765e6480 100644 --- a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.common.util; -import java.nio.charset.StandardCharsets; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -40,6 +39,7 @@ import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java index deac15b50d3..685e06afb16 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java @@ -27,7 +27,13 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.concurrent.*; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.PriorityBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java index 7489ea8f9dc..9b1cfb64573 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.xcontent; import com.fasterxml.jackson.dataformat.cbor.CBORConstants; import com.fasterxml.jackson.dataformat.smile.SmileConstants; - import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; @@ -97,17 +96,17 @@ public class XContentFactoryTests extends ESTestCase { is = new ByteArrayInputStream(new byte[] {(byte) 1}); assertNull(XContentFactory.xContentType(is)); } - + public void testJsonFromBytesOptionallyPrecededByUtf8Bom() throws Exception { byte[] bytes = new byte[] {(byte) '{', (byte) '}'}; assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON)); - + bytes = new byte[] {(byte) 0x20, (byte) '{', (byte) '}'}; assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON)); - + bytes 
= new byte[] {(byte) 0xef, (byte) 0xbb, (byte) 0xbf, (byte) '{', (byte) '}'}; assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON)); - + bytes = new byte[] {(byte) 0xef, (byte) 0xbb, (byte) 0xbf, (byte) 0x20, (byte) '{', (byte) '}'}; assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON)); } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index 7ffafc004ab..9129e3c05b3 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.xcontent.builder; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.io.FastCharArrayWriter; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,6 +38,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; +import java.util.Collections; import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; @@ -51,9 +51,6 @@ import static org.elasticsearch.common.xcontent.XContentBuilder.FieldCaseConvers import static org.elasticsearch.common.xcontent.XContentBuilder.FieldCaseConversion.UNDERSCORE; import static org.hamcrest.Matchers.equalTo; -/** - * - */ public class XContentBuilderTests extends ESTestCase { public void testPrettyWithLfAtEnd() throws Exception { ByteArrayOutputStream os = new ByteArrayOutputStream(); @@ -350,4 +347,33 @@ public class XContentBuilderTests extends ESTestCase { "}", string.trim()); } + public void testWriteMapWithNullKeys() throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + try { + builder.map(Collections.singletonMap(null, "test")); + fail("write map should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field name cannot be null")); + } + } + + public void testWriteMapValueWithNullKeys() throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + try { + builder.value(Collections.singletonMap(null, "test")); + fail("write map should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field name cannot be null")); + } + } + + public void testWriteFieldMapWithNullKeys() throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + try { + builder.field("map", Collections.singletonMap(null, "test")); + fail("write map should have failed"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("field name cannot be null")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java index 50683007717..80cc12b5f3b 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java @@ -21,7 +21,12 @@ package 
org.elasticsearch.common.xcontent.support.filtering; import org.elasticsearch.test.ESTestCase; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isEmptyString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class FilterPathTests extends ESTestCase { @@ -348,4 +353,4 @@ public class FilterPathTests extends ESTestCase { assertThat(filterPath.getSegment(), isEmptyString()); assertSame(filterPath, FilterPath.EMPTY); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java b/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java index 8e44c7a5442..4efedd9154a 100644 --- a/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java +++ b/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; - import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java index 1332a4fb13a..223f8612b31 100644 --- a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java @@ -31,7 +31,10 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.CyclicBarrier; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index f9778f6438f..67d3df42b38 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -25,7 +25,11 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -55,7 +59,16 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import 
org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; -import org.elasticsearch.test.disruption.*; +import org.elasticsearch.test.disruption.BlockClusterStateProcessing; +import org.elasticsearch.test.disruption.IntermittentLongGCDisruption; +import org.elasticsearch.test.disruption.LongGCDisruption; +import org.elasticsearch.test.disruption.NetworkDelaysPartition; +import org.elasticsearch.test.disruption.NetworkDisconnectPartition; +import org.elasticsearch.test.disruption.NetworkPartition; +import org.elasticsearch.test.disruption.NetworkUnresponsivePartition; +import org.elasticsearch.test.disruption.ServiceDisruptionScheme; +import org.elasticsearch.test.disruption.SingleNodeDisruption; +import org.elasticsearch.test.disruption.SlowClusterStateProcessing; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.TransportException; @@ -65,15 +78,31 @@ import org.elasticsearch.transport.TransportService; import org.junit.Before; import java.io.IOException; -import java.util.*; -import java.util.concurrent.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0) @ESIntegTestCase.SuppressLocalMode @@ -132,7 +161,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out - .put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put("http.enabled", false) // just to make test quicker .put("gateway.local.list_timeout", "10s") // still long to induce failures but to long so test won't time out .build(); @@ -150,7 +179,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // TODO: Rarely use default settings form some of these Settings nodeSettings = Settings.builder() .put(DEFAULT_SETTINGS) - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, minimumMasterNode) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minimumMasterNode) .build(); if (discoveryConfig == null) { @@ -217,7 +246,7 @@ public class 
DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.info("--> reducing min master nodes to 2"); assertAcked(client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2)).get()); + .setTransientSettings(Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)).get()); String master = internalCluster().getMasterName(); String nonMaster = null; @@ -293,9 +322,9 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // Wait until the master node sees al 3 nodes again. ensureStableCluster(3, new TimeValue(DISRUPTION_HEALING_OVERHEAD.millis() + networkPartition.expectedTimeToHeal().millis())); - logger.info("Verify no master block with {} set to {}", DiscoverySettings.NO_MASTER_BLOCK, "all"); + logger.info("Verify no master block with {} set to {}", DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all"); client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(DiscoverySettings.NO_MASTER_BLOCK, "all")) + .setTransientSettings(Settings.builder().put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all")) .get(); networkPartition.startDisrupting(); @@ -863,7 +892,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { internalCluster().startNodesAsync(3, Settings.builder() .put(DiscoveryService.SETTING_INITIAL_STATE_TIMEOUT, "1ms") - .put(DiscoverySettings.PUBLISH_TIMEOUT, "3s") + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "3s") .build()).get(); logger.info("applying disruption while cluster is forming ..."); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java index c4955561905..537ffa3acd1 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java @@ -26,7 +26,11 @@ import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; public class ElectMasterServiceTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 42cb7cf43f4..0ca261cbf65 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; @@ -38,14 +39,22 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.BaseFuture; import 
org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.membership.MembershipAction; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.junit.annotations.TestLogging; import org.junit.Before; -import java.util.*; -import java.util.concurrent.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -67,7 +76,7 @@ public class NodeJoinControllerTests extends ESTestCase { // make sure we have a master clusterService.setState(ClusterState.builder(clusterService.state()).nodes(DiscoveryNodes.builder(initialNodes).masterNodeId(localNode.id()))); nodeJoinController = new NodeJoinController(clusterService, new NoopRoutingService(Settings.EMPTY), - new DiscoverySettings(Settings.EMPTY, new NodeSettingsService(Settings.EMPTY)), Settings.EMPTY); + new DiscoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), Settings.EMPTY); } public void testSimpleJoinAccumulation() throws InterruptedException, ExecutionException { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 0b5f9997dba..9f9c0420c2f 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Priority; @@ -45,6 +46,7 @@ import org.elasticsearch.discovery.zen.fd.FaultDetection; import org.elasticsearch.discovery.zen.membership.MembershipAction; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.TestCustomMetaData; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesTransportRequest; @@ -59,6 +61,7 @@ import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; +import java.util.EnumSet; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -84,7 +87,7 @@ public class ZenDiscoveryIT extends ESIntegTestCase { assertThat(zenDiscovery.isRejoinOnMasterGone(), is(true)); client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(ZenDiscovery.SETTING_REJOIN_ON_MASTER_GONE, false)) + .setTransientSettings(Settings.builder().put(ZenDiscovery.REJOIN_ON_MASTER_GONE_SETTING.getKey(), false)) .get(); 
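The hunks above all follow one migration pattern: call sites stop hard-coding setting keys (ZenDiscovery.SETTING_REJOIN_ON_MASTER_GONE, DiscoverySettings.PUBLISH_TIMEOUT) and instead go through typed Setting objects, reading the raw key via getKey() when a Settings builder is involved, with ClusterSettings replacing NodeSettingsService as the registry behind dynamic updates. A minimal sketch of the idea, using a hypothetical stand-in rather than the real org.elasticsearch.common.settings.Setting class:

---------------------------------------------------------------------------
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// NOT the real org.elasticsearch.common.settings.Setting, just a model of the pattern
final class Setting<T> {
    private final String key;
    private final T defaultValue;
    private final Function<String, T> parser;

    Setting(String key, T defaultValue, Function<String, T> parser) {
        this.key = key;
        this.defaultValue = defaultValue;
        this.parser = parser;
    }

    // call sites use getKey() wherever they previously used a raw string constant
    String getKey() {
        return key;
    }

    T get(Map<String, String> settings) {
        String raw = settings.get(key);
        return raw == null ? defaultValue : parser.apply(raw);
    }
}

public class SettingDemo {
    // typed replacement for a constant like ZenDiscovery.SETTING_REJOIN_ON_MASTER_GONE
    static final Setting<Boolean> REJOIN_ON_MASTER_GONE =
            new Setting<>("discovery.zen.rejoin_on_master_gone", true, Boolean::parseBoolean);

    public static void main(String[] args) {
        Map<String, String> settings = new HashMap<>();
        // builders still speak string keys, hence put(SETTING.getKey(), value)
        settings.put(REJOIN_ON_MASTER_GONE.getKey(), "false");
        System.out.println(REJOIN_ON_MASTER_GONE.get(settings)); // prints: false
    }
}
---------------------------------------------------------------------------

The payoff is that a registry such as ClusterSettings can validate and parse every registered setting up front, instead of each call site interpreting a bare string key on its own.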
assertThat(zenDiscovery.isRejoinOnMasterGone(), is(false)); @@ -228,16 +231,69 @@ public class ZenDiscoveryIT extends ESIntegTestCase { assertThat(ExceptionsHelper.detailedMessage(reference.get()), containsString("cluster state from a different master than the current one, rejecting")); } + public void testHandleNodeJoin_incompatibleClusterState() throws UnknownHostException { + Settings nodeSettings = Settings.settingsBuilder() + .put("discovery.type", "zen") // <-- To override the local setting if set externally + .build(); + String masterOnlyNode = internalCluster().startMasterOnlyNode(nodeSettings); + String node1 = internalCluster().startNode(nodeSettings); + ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, masterOnlyNode); + ClusterService clusterService = internalCluster().getInstance(ClusterService.class, node1); + final ClusterState state = clusterService.state(); + MetaData.Builder mdBuilder = MetaData.builder(state.metaData()); + mdBuilder.putCustom(CustomMetaData.TYPE, new CustomMetaData("data")); + ClusterState stateWithCustomMetaData = ClusterState.builder(state).metaData(mdBuilder).build(); + + final AtomicReference<IllegalStateException> holder = new AtomicReference<>(); + DiscoveryNode node = state.nodes().localNode(); + zenDiscovery.handleJoinRequest(node, stateWithCustomMetaData, new MembershipAction.JoinCallback() { + @Override + public void onSuccess() { + } + + @Override + public void onFailure(Throwable t) { + holder.set((IllegalStateException) t); + } + }); + + assertThat(holder.get(), notNullValue()); + assertThat(holder.get().getMessage(), equalTo("failure when sending a validation request to node")); + } + + public static class CustomMetaData extends TestCustomMetaData { + public static final String TYPE = "custom_md"; + + CustomMetaData(String data) { + super(data); + } + + @Override + protected TestCustomMetaData newTestCustomMetaData(String data) { + return new CustomMetaData(data); + } + + @Override + public String type() { + return TYPE; + } + + @Override + public EnumSet<MetaData.XContentContext> context() { + return EnumSet.of(MetaData.XContentContext.GATEWAY, MetaData.XContentContext.SNAPSHOT); + } + } + public void testHandleNodeJoin_incompatibleMinVersion() throws UnknownHostException { Settings nodeSettings = Settings.settingsBuilder() .put("discovery.type", "zen") // <-- To override the local setting if set externally .build(); String nodeName = internalCluster().startNode(nodeSettings, Version.V_2_0_0_beta1); ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName); - + ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName); DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_1_6_0); final AtomicReference<IllegalStateException> holder = new AtomicReference<>(); - zenDiscovery.handleJoinRequest(node, new MembershipAction.JoinCallback() { + zenDiscovery.handleJoinRequest(node, clusterService.state(), new MembershipAction.JoinCallback() { @Override public void onSuccess() { } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java index bc5e97ce08e..dfaf407f850 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java @@ -28,9
+28,19 @@ import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.discovery.zen.publish.PendingClusterStatesQueue.ClusterStateContext; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; public class PendingClusterStatesQueueTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 8dea09ba093..6faa02e16d7 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -21,7 +21,11 @@ package org.elasticsearch.discovery.zen.publish; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -34,30 +38,45 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BytesTransportRequest; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportConnectionListener; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseOptions; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.local.LocalTransport; import org.junit.After; import org.junit.Before; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; @TestLogging("discovery.zen.publish:TRACE") public class PublishClusterStateActionTests extends ESTestCase { @@ -138,7 +157,7 @@ public class PublishClusterStateActionTests extends ESTestCase { public MockNode createMockNode(String name, Settings settings, Version version, @Nullable ClusterStateListener listener) throws Exception { settings = Settings.builder() .put("name", name) - .put(TransportService.SETTING_TRACE_LOG_INCLUDE, "", TransportService.SETTING_TRACE_LOG_EXCLUDE, "NOTHING") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .put(settings) .build(); @@ -219,7 +238,7 @@ public class PublishClusterStateActionTests extends ESTestCase { protected MockPublishAction buildPublishClusterStateAction(Settings settings, MockTransportService transportService, DiscoveryNodesProvider nodesProvider, PublishClusterStateAction.NewPendingClusterStateListener listener) { - DiscoverySettings discoverySettings = new DiscoverySettings(settings, new NodeSettingsService(settings)); + DiscoverySettings discoverySettings = new DiscoverySettings(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); return new MockPublishAction(settings, transportService, nodesProvider, listener, discoverySettings, ClusterName.DEFAULT); } @@ -327,7 +346,7 @@ public class PublishClusterStateActionTests extends ESTestCase { } public void testDisablingDiffPublishing() throws Exception { - Settings noDiffPublishingSettings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE, false).build(); + Settings noDiffPublishingSettings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build(); MockNode nodeA = createMockNode("nodeA", noDiffPublishingSettings, Version.CURRENT, new ClusterStateListener() { @Override @@ -366,7 +385,7 @@ public class PublishClusterStateActionTests extends ESTestCase { public void testSimultaneousClusterStatePublishing() throws Exception { int numberOfNodes = randomIntBetween(2, 10); int numberOfIterations = scaledRandomIntBetween(5, 50); - Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE, randomBoolean()).build(); + Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), randomBoolean()).build(); MockNode master = createMockNode("node0", settings, Version.CURRENT, new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { @@ -492,8 +511,8 @@ public class PublishClusterStateActionTests extends ESTestCase { final boolean expectingToCommit = randomBoolean(); Settings.Builder settings = Settings.builder(); // make sure we have a reasonable timeout if we expect to timeout, o.w. one that will make the test "hang" - settings.put(DiscoverySettings.COMMIT_TIMEOUT, expectingToCommit == false && timeOutNodes > 0 ? "100ms" : "1h") - .put(DiscoverySettings.PUBLISH_TIMEOUT, "5ms"); // test is about committing + settings.put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), expectingToCommit == false && timeOutNodes > 0 ? 
"100ms" : "1h") + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "5ms"); // test is about committing MockNode master = createMockNode("master", settings.build()); @@ -677,7 +696,7 @@ public class PublishClusterStateActionTests extends ESTestCase { */ public void testTimeoutOrCommit() throws Exception { Settings settings = Settings.builder() - .put(DiscoverySettings.COMMIT_TIMEOUT, "1ms").build(); // short but so we will sometime commit sometime timeout + .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "1ms").build(); // short but so we will sometime commit sometime timeout MockNode master = createMockNode("master", settings); MockNode node = createMockNode("node", settings); @@ -867,5 +886,15 @@ public class PublishClusterStateActionTests extends ESTestCase { this.error.set(error); assertThat(response.get(), nullValue()); } + + @Override + public long getRequestId() { + return 0; + } + + @Override + public String getChannelType() { + return "capturing"; + } } } diff --git a/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java b/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java index 47ab7deb995..52f19d7deee 100644 --- a/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.document; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; @@ -29,6 +28,7 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESIntegTestCase; diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java index 5f3b0567e32..a661575cbec 100644 --- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java @@ -30,12 +30,16 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.*; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE; import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX; import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; /** */ diff --git 
a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java index e25b95be578..60cf2ef5dc1 100644 --- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java +++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java @@ -33,10 +33,15 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; -import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.*; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GT; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT; +import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE; import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX; import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; /** */ @@ -430,4 +435,4 @@ public class FieldStatsTests extends ESSingleNodeTestCase { assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0)); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 15ddc9dd771..0de220a8fa3 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -56,8 +56,8 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { ClusterChangedEvent generateEvent(boolean initializing, boolean versionChanged, boolean masterEligible) { //ridiculous settings to make sure we don't run into uninitialized because fo default AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 100) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 100) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) .build()); @@ -110,8 +110,8 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { ClusterChangedEvent generateCloseEvent(boolean masterEligible) { //ridiculous settings to make sure we don't run into uninitialized because fo default AllocationService strategy = createAllocationService(settingsBuilder() - .put("cluster.routing.allocation.concurrent_recoveries", 100) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.node_concurrent_recoveries", 100) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) .build()); diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 
441314b1e35..98b62dc18ba 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -19,7 +19,12 @@ package org.elasticsearch.gateway; import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.store.*; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -28,7 +33,11 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -41,10 +50,20 @@ import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.stream.StreamSupport; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; @LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to work with ExtrasFS public class MetaDataStateFormatTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index 73cbb51faed..44c1fae6492 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -27,7 +27,11 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.RestoreSource; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.common.Nullable; @@ -36,13 +40,14 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Before; -import java.io.IOException; +import java.util.Arrays; import 
java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; /** */ @@ -59,25 +64,29 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { this.testAllocator = new TestAllocator(); } - /** - * Verifies that the canProcess method of primary allocation behaves correctly - * and processes only the applicable shard. - */ - public void testNoProcessReplica() { - ShardRouting shard = TestShardRouting.newShardRouting("test", 0, null, null, null, false, ShardRoutingState.UNASSIGNED, 0, new UnassignedInfo(UnassignedInfo.Reason.CLUSTER_RECOVERED, null)); - assertThat(testAllocator.needToFindPrimaryCopy(shard), equalTo(false)); - } - - public void testNoProcessPrimayNotAllcoatedBefore() { - ShardRouting shard = TestShardRouting.newShardRouting("test", 0, null, null, null, true, ShardRoutingState.UNASSIGNED, 0, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); - assertThat(testAllocator.needToFindPrimaryCopy(shard), equalTo(false)); + public void testNoProcessPrimaryNotAllocatedBefore() { + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), randomBoolean(), Version.CURRENT); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), true, Version.V_2_1_0); + } + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); + assertThat(allocation.routingNodes().unassigned().iterator().next().shardId(), equalTo(shardId)); } /** * Tests that when async fetch returns that there is no data, the shard will not be allocated. */ public void testNoAsyncFetchData() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "allocId"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_0); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -85,11 +94,17 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } /** - * Tests when the node returns that no data was found for it (-1), it will be moved to ignore unassigned. + * Tests when the node returns that no data was found for it (-1 for version and null for allocation id), + * it will be moved to ignore unassigned. 
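The rewritten PrimaryShardAllocatorTests above randomize between two discovery paths: for indices created on the current version the allocator matches a node's reported allocation id against the index's active allocation ids, while pre-3.0 indices fall back to comparing legacy shard versions, where -1 means no data. A rough stand-in for that candidate check, assuming the semantics implied by the assertions (this is not the real PrimaryShardAllocator):

---------------------------------------------------------------------------
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class PrimaryCandidateDemo {
    // one node's reported shard state: legacy version (-1 == no data) plus
    // the allocation id (null on pre-3.0 nodes or when no copy exists)
    static final class NodeShardState {
        final long version;
        final String allocationId;

        NodeShardState(long version, String allocationId) {
            this.version = version;
            this.allocationId = allocationId;
        }
    }

    static boolean isCandidate(NodeShardState state, Set<String> activeAllocationIds, boolean legacyIndex) {
        if (legacyIndex) {
            // pre-3.0 index: any node that actually has data is a candidate
            return state.version != -1;
        }
        // current index: the copy must match one of the active allocation ids
        return state.allocationId != null && activeAllocationIds.contains(state.allocationId);
    }

    public static void main(String[] args) {
        Set<String> active = new HashSet<>(Arrays.asList("id2"));
        System.out.println(isCandidate(new NodeShardState(1, "id1"), active, false)); // false -> ignored
        System.out.println(isCandidate(new NodeShardState(1, null), active, true));   // true  -> allocatable
    }
}
---------------------------------------------------------------------------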
*/ public void testNoAllocationFound() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); - testAllocator.addData(node1, -1); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "allocId"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_0); + } + testAllocator.addData(node1, -1, null); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -97,11 +112,43 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } /** - * Tests when the node returns that no data was found for it (-1), it will be moved to ignore unassigned. + * Tests when the node returns data with a shard allocation id that does not match active allocation ids, it will be moved to ignore unassigned. + */ + public void testNoMatchingAllocationIdFound() { + RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "id2"); + testAllocator.addData(node1, 1, "id1"); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); + assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); + } + + /** + * Tests that when there is a node to allocate the shard to, and there are no active allocation ids, it will be allocated to it. + * This is the case when we have old shards from pre-3.0 days. + */ + public void testNoActiveAllocationIds() { + RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_1); + testAllocator.addData(node1, 1, null); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node1.id())); + } + + /** + * Tests when the node returns that no data was found for it, it will be moved to ignore unassigned. 
*/ public void testStoreException() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); - testAllocator.addData(node1, 3, new CorruptIndexException("test", "test")); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); + testAllocator.addData(node1, 1, "allocId1", new CorruptIndexException("test", "test")); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_1); + testAllocator.addData(node1, 3, null, new CorruptIndexException("test", "test")); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -112,8 +159,14 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * Tests that when there is a node to allocate the shard to, it will be allocated to it. */ public void testFoundAllocationAndAllocating() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); - testAllocator.addData(node1, 10); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); + testAllocator.addData(node1, 1, "allocId1"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_2_0); + testAllocator.addData(node1, 3, null); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -126,8 +179,14 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * it will be moved to ignore unassigned until it can be allocated to. */ public void testFoundAllocationButThrottlingDecider() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders()); - testAllocator.addData(node1, 10); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); + testAllocator.addData(node1, 1, "allocId1"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders(), false, Version.V_2_2_0); + testAllocator.addData(node1, 3, null); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -139,8 +198,14 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * force the allocation to it. 
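The three testFoundAllocation* cases pin down how the allocator reacts to each decider verdict once a data copy has been found: yes assigns, throttle parks the shard in ignore-unassigned for a later pass, and no is overridden, because a primary with an existing copy has to be assigned somewhere. Sketched as a table-in-code, with semantics inferred from the assertions rather than taken from the real RoutingAllocation API:

---------------------------------------------------------------------------
public class DeciderOutcomeDemo {
    enum Decision { YES, THROTTLE, NO }

    static String outcome(Decision decision) {
        switch (decision) {
            case YES:
                return "assign: shard goes to INITIALIZING on the node holding the copy";
            case THROTTLE:
                return "park: shard moves to ignore-unassigned and is retried later";
            case NO:
                return "force-assign: a primary with a found copy is allocated anyway";
            default:
                throw new AssertionError("unknown decision " + decision);
        }
    }

    public static void main(String[] args) {
        for (Decision d : Decision.values()) {
            System.out.println(d + " -> " + outcome(d));
        }
    }
}
---------------------------------------------------------------------------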
*/ public void testFoundAllocationButNoDecider() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders()); - testAllocator.addData(node1, 10); + final RoutingAllocation allocation; + if (randomBoolean()) { + allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); + testAllocator.addData(node1, 1, "allocId1"); + } else { + allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders(), false, Version.V_2_0_0); + testAllocator.addData(node1, 3, null); + } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -149,11 +214,11 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } /** - * Tests that the highest version node is chosed for allocation. + * Tests that the highest version node is chosen for allocation. */ - public void testAllocateToTheHighestVersion() { - RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders()); - testAllocator.addData(node1, 10).addData(node2, 12); + public void testAllocateToTheHighestVersionOnLegacyIndex() { + RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_0_0); + testAllocator.addData(node1, 10, null).addData(node2, 12, null); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -162,35 +227,150 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } /** - * Tests that when restoring from snapshot, even if we didn't find any node to allocate on, the shard - * will remain in the unassigned list to be allocated later. + * Tests that when restoring from a snapshot and we find a node with a shard copy and allocation + * deciders say yes, we allocate to that node. 
*/ - public void testRestoreIgnoresNoNodesToAllocate() { - MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) - .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), Version.CURRENT, shardId.getIndex())) - .build(); - ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) - .metaData(metaData) - .routingTable(routingTable) - .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), state.getRoutingNodes(), state.nodes(), null, System.nanoTime()); + public void testRestore() { + RoutingAllocation allocation = getRestoreRoutingAllocation(yesAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + } - testAllocator.addData(node1, -1).addData(node2, -1); + /** + * Tests that when restoring from a snapshot and we find a node with a shard copy and allocation + * deciders say throttle, we add it to ignored shards. + */ + public void testRestoreThrottle() { + RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); + } + + /** + * Tests that when restoring from a snapshot and we find a node with a shard copy but allocation + * deciders say no, we still allocate to that node. + */ + public void testRestoreForcesAllocateIfShardAvailable() { + RoutingAllocation allocation = getRestoreRoutingAllocation(noAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "some allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + } + + /** + * Tests that when restoring from a snapshot and we don't find a node with a shard copy, the shard will remain in + * the unassigned list to be allocated later. 
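The restore tests construct index metadata whose active allocation ids depend on the index's creation version; getRestoreRoutingAllocation below builds exactly that. A plain-Java sketch of the version split (the allocId value mirrors the test fixture; everything else here is illustrative):

---------------------------------------------------------------------------
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class ActiveAllocationIdsDemo {
    static Set<String> activeAllocationIds(boolean indexCreatedOnCurrentVersion) {
        // pre-3.0 indices carry no allocation ids, so the allocator has to
        // fall back to legacy version-based matching for them
        return indexCreatedOnCurrentVersion
                ? new HashSet<>(Arrays.asList("allocId"))
                : Collections.<String>emptySet();
    }

    public static void main(String[] args) {
        System.out.println(activeAllocationIds(true));  // [allocId]
        System.out.println(activeAllocationIds(false)); // []
    }
}
---------------------------------------------------------------------------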
+ */ + public void testRestoreDoesNotAssignIfNoShardAvailable() { + RoutingAllocation allocation = getRestoreRoutingAllocation(yesAllocationDeciders()); + testAllocator.addData(node1, -1, null); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); + } + + private RoutingAllocation getRestoreRoutingAllocation(AllocationDeciders allocationDeciders) { + Version version = randomFrom(Version.CURRENT, Version.V_2_0_0); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)).numberOfShards(1).numberOfReplicas(0) + .putActiveAllocationIds(0, version == Version.CURRENT ? new HashSet<>(Arrays.asList("allocId")) : Collections.emptySet())) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), version, shardId.getIndex())) + .build(); + ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData) + .routingTable(routingTable) + .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); + return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); + } + + /** + * Tests that when recovering using "recover_on_any_node" and we find a node with a shard copy and allocation + * deciders say yes, we allocate to that node. + */ + public void testRecoverOnAnyNode() { + RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(yesAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + } + + /** + * Tests that when recovering using "recover_on_any_node" and we find a node with a shard copy and allocation + * deciders say throttle, we add it to ignored shards. + */ + public void testRecoverOnAnyNodeThrottle() { + RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); + } + + /** + * Tests that when recovering using "recover_on_any_node" and we find a node with a shard copy but allocation + * deciders say no, we still allocate to that node. 
+ */ + public void testRecoverOnAnyNodeForcesAllocateIfShardAvailable() { + RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(noAllocationDeciders()); + testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + } + + /** + * Tests that when recovering using "recover_on_any_node" and we don't find a node with a shard copy we let + * BalancedShardAllocator assign the shard + */ + public void testRecoverOnAnyNodeDoesNotAssignIfNoShardAvailable() { + RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(yesAllocationDeciders()); + testAllocator.addData(node1, -1, null); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(false)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); + } + + private RoutingAllocation getRecoverOnAnyNodeRoutingAllocation(AllocationDeciders allocationDeciders) { + Version version = randomFrom(Version.CURRENT, Version.V_2_0_0); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version) + .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) + .put(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, true)) + .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, version == Version.CURRENT ? new HashSet<>(Arrays.asList("allocId")) : Collections.emptySet())) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), Version.CURRENT, shardId.getIndex())) + .build(); + ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData) + .routingTable(routingTable) + .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); + return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); } /** * Tests that only when enough copies of the shard exists we are going to allocate it. This test * verifies that with same version (1), and quorum allocation. 
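getRecoverOnAnyNodeRoutingAllocation above enables the shared-filesystem flags referenced through IndexMetaData.SETTING_SHARED_FILESYSTEM and IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE. A sketch of the gate those two flags form; the literal setting keys below are assumptions, since the tests only ever use the constants:

---------------------------------------------------------------------------
import java.util.HashMap;
import java.util.Map;

public class SharedFsDemo {
    public static void main(String[] args) {
        Map<String, String> indexSettings = new HashMap<>();
        indexSettings.put("index.shared_filesystem", "true");                     // assumed key
        indexSettings.put("index.shared_filesystem.recover_on_any_node", "true"); // assumed key

        // with both flags set, every data node is a candidate for the primary,
        // not just the nodes that reported a copy of the shard
        boolean recoverOnAnyNode =
                Boolean.parseBoolean(indexSettings.get("index.shared_filesystem"))
                        && Boolean.parseBoolean(indexSettings.get("index.shared_filesystem.recover_on_any_node"));
        System.out.println("recover on any node: " + recoverOnAnyNode);
    }
}
---------------------------------------------------------------------------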
*/ - public void testEnoughCopiesFoundForAllocation() { + public void testEnoughCopiesFoundForAllocationOnLegacyIndex() { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2)) + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2)) .build(); RoutingTable routingTable = RoutingTable.builder() .addAsRecovery(metaData.index(shardId.getIndex())) @@ -207,7 +387,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node1, 1); + testAllocator.addData(node1, 1, null); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -215,7 +395,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node2, 1); + testAllocator.addData(node2, 1, null); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); @@ -229,9 +409,9 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * Tests that only when enough copies of the shard exists we are going to allocate it. This test * verifies that even with different version, we treat different versions as a copy, and count them. 
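The two legacy-index quorum tests here assert the copy-counting rule: with one primary and two replicas, a single found copy is not enough and the shard stays ignored, while a second copy satisfies the quorum. A minimal sketch of that arithmetic, as an illustration rather than the allocator's actual code:

---------------------------------------------------------------------------
public class QuorumDemo {
    static boolean enoughCopiesFound(int numberOfReplicas, int copiesFound) {
        int totalCopies = 1 + numberOfReplicas; // primary plus replicas
        int quorum = totalCopies / 2 + 1;       // strict majority of all copies
        return copiesFound >= quorum;
    }

    public static void main(String[] args) {
        System.out.println(enoughCopiesFound(2, 1)); // false -> shard stays ignored
        System.out.println(enoughCopiesFound(2, 2)); // true  -> shard can be allocated
    }
}
---------------------------------------------------------------------------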
*/ - public void testEnoughCopiesFoundForAllocationWithDifferentVersion() { + public void testEnoughCopiesFoundForAllocationOnLegacyIndexWithDifferentVersion() { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2)) + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2)) .build(); RoutingTable routingTable = RoutingTable.builder() .addAsRecovery(metaData.index(shardId.getIndex())) @@ -248,7 +428,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node1, 1); + testAllocator.addData(node1, 1, null); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -256,7 +436,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node2, 2); + testAllocator.addData(node2, 2, null); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); @@ -266,67 +446,20 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node2.id())); } - public void testAllocationOnAnyNodeWithSharedFs() { - ShardRouting shard = TestShardRouting.newShardRouting("test", 0, null, null, null, false, - ShardRoutingState.UNASSIGNED, 0, - new UnassignedInfo(UnassignedInfo.Reason.CLUSTER_RECOVERED, null)); - - Map<DiscoveryNode, TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> data = new HashMap<>(); - data.put(node1, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node1, 1)); - data.put(node2, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node2, 5)); - data.put(node3, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node3, -1)); - AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetches = - new AsyncShardFetch.FetchResult<>(shardId, data, new HashSet<>(), new HashSet<>()); - - PrimaryShardAllocator.NodesAndVersions nAndV = testAllocator.buildNodesAndVersions(shard, false, new HashSet<String>(), fetches); - assertThat(nAndV.allocationsFound, equalTo(2)); - assertThat(nAndV.highestVersion, equalTo(5L)); - assertThat(nAndV.nodes, contains(node2)); - - nAndV = testAllocator.buildNodesAndVersions(shard, true, new HashSet<String>(), fetches); - assertThat(nAndV.allocationsFound, equalTo(3)); - assertThat(nAndV.highestVersion, equalTo(5L)); - // All three nodes are potential candidates because shards can be recovered on any node - assertThat(nAndV.nodes, contains(node2, node1, node3)); - } - - public void testAllocationOnAnyNodeShouldPutNodesWithExceptionsLast() { - ShardRouting shard = TestShardRouting.newShardRouting("test", 0, null, null, null, false, - ShardRoutingState.UNASSIGNED, 0, - new
UnassignedInfo(UnassignedInfo.Reason.CLUSTER_RECOVERED, null)); - - Map<DiscoveryNode, TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> data = new HashMap<>(); - data.put(node1, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node1, 1)); - data.put(node2, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node2, 1)); - data.put(node3, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node3, 1, new IOException("I failed to open"))); - HashSet<String> ignoredNodes = new HashSet<>(); - ignoredNodes.add(node2.id()); - AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetches = - new AsyncShardFetch.FetchResult<>(shardId, data, new HashSet<>(), ignoredNodes); - - PrimaryShardAllocator.NodesAndVersions nAndV = testAllocator.buildNodesAndVersions(shard, false, ignoredNodes, fetches); - assertThat(nAndV.allocationsFound, equalTo(1)); - assertThat(nAndV.highestVersion, equalTo(1L)); - assertThat(nAndV.nodes, contains(node1)); - - nAndV = testAllocator.buildNodesAndVersions(shard, true, ignoredNodes, fetches); - assertThat(nAndV.allocationsFound, equalTo(2)); - assertThat(nAndV.highestVersion, equalTo(1L)); - // node3 should be last here - assertThat(nAndV.nodes.size(), equalTo(2)); - assertThat(nAndV.nodes, contains(node1, node3)); - } - - private RoutingAllocation routingAllocationWithOnePrimaryNoReplicas(AllocationDeciders deciders) { + private RoutingAllocation routingAllocationWithOnePrimaryNoReplicas(AllocationDeciders deciders, boolean asNew, Version version, String... activeAllocationIds) { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) - .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsRecovery(metaData.index(shardId.getIndex())) - .build(); + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)) + .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, new HashSet<>(Arrays.asList(activeAllocationIds)))) + .build(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + if (asNew) { + routingTableBuilder.addAsNew(metaData.index(shardId.getIndex())); + } else { + routingTableBuilder.addAsRecovery(metaData.index(shardId.getIndex())); + } ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) .metaData(metaData) - .routingTable(routingTable) + .routingTable(routingTableBuilder.build()) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); return new RoutingAllocation(deciders, new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); } @@ -344,15 +477,15 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { return this; } - public TestAllocator addData(DiscoveryNode node, long version) { - return addData(node, version, null); + public TestAllocator addData(DiscoveryNode node, long version, String allocationId) { + return addData(node, version, allocationId, null); } - public TestAllocator addData(DiscoveryNode node, long version, @Nullable Throwable storeException) { + public TestAllocator addData(DiscoveryNode node, long version, String allocationId, @Nullable Throwable storeException) { if (data == null) { data = new HashMap<>(); } - data.put(node, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, version, storeException)); + data.put(node, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, version, allocationId, storeException)); return this; } diff --git
a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java index 88499bf96cd..3b7e62216ce 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java @@ -19,11 +19,20 @@ package org.elasticsearch.gateway; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; public class PriorityComparatorTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java index edde1720474..a817b23949f 100644 --- a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java @@ -20,10 +20,10 @@ package org.elasticsearch.gateway; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -32,14 +32,10 @@ import org.elasticsearch.test.InternalTestCluster.RestartCallback; import java.util.concurrent.TimeUnit; import static org.elasticsearch.client.Requests.clusterHealthRequest; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; /** * @@ -51,72 +47,12 @@ public class QuorumGatewayIT extends ESIntegTestCase { return 2; } - public void testChangeInitialShardsRecovery() throws Exception { - logger.info("--> starting 3 nodes"); - final String[] nodes = internalCluster().startNodesAsync(3).get().toArray(new String[0]); - - createIndex("test"); - ensureGreen(); - NumShards test = getNumShards("test"); - - logger.info("--> indexing..."); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); - //We don't check for failures in the flush response: if we do we might get the following: - // FlushNotAllowedEngineException[[test][1] recovery is in progress, flush 
[COMMIT_TRANSLOG] is not allowed] - flush(); - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).get(); - refresh(); - - for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l); - } - - final String nodeToRemove = nodes[between(0,2)]; - logger.info("--> restarting 1 nodes -- kill 2"); - internalCluster().fullRestart(new RestartCallback() { - @Override - public Settings onNodeStopped(String nodeName) throws Exception { - return Settings.EMPTY; - } - - @Override - public boolean doRestart(String nodeName) { - return nodeToRemove.equals(nodeName); - } - }); - if (randomBoolean()) { - Thread.sleep(between(1, 400)); // wait a bit and give is a chance to try to allocate - } - ClusterHealthResponse clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForNodes("1")).actionGet(); - assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.RED)); // nothing allocated yet - assertTrue(awaitBusy(() -> { - ClusterStateResponse clusterStateResponse = internalCluster().smartClient().admin().cluster().prepareState().setMasterNodeTimeout("500ms").get(); - return clusterStateResponse.getState() != null && clusterStateResponse.getState().routingTable().index("test") != null; - })); // wait until we get a cluster state - could be null if we quick enough. - final ClusterStateResponse clusterStateResponse = internalCluster().smartClient().admin().cluster().prepareState().setMasterNodeTimeout("500ms").get(); - assertThat(clusterStateResponse.getState(), notNullValue()); - assertThat(clusterStateResponse.getState().routingTable().index("test"), notNullValue()); - assertThat(clusterStateResponse.getState().routingTable().index("test").allPrimaryShardsActive(), is(false)); - logger.info("--> change the recovery.initial_shards setting, and make sure its recovered"); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put("recovery.initial_shards", 1)).get(); - - logger.info("--> running cluster_health (wait for the shards to startup), primaries only since we only have 1 node"); - clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(test.numPrimaries)).actionGet(); - logger.info("--> done cluster_health, status " + clusterHealth.getStatus()); - assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); - - for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l); - } - } - public void testQuorumRecovery() throws Exception { logger.info("--> starting 3 nodes"); - internalCluster().startNodesAsync(3).get(); // we are shutting down nodes - make sure we don't have 2 clusters if we test network - setMinimumMasterNodes(2); + internalCluster().startNodesAsync(3, + Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2).build()).get(); + createIndex("test"); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java index 2184fda47c4..dbdf747de63 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java +++ 
b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java @@ -82,9 +82,9 @@ public class RecoveryBackwardsCompatibilityIT extends ESBackcompatTestCase { SearchResponse countResponse = client().prepareSearch().setSize(0).get(); assertHitCount(countResponse, numDocs); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none")).execute().actionGet(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none")).execute().actionGet(); backwardsCluster().upgradeAllNodes(); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "all")).execute().actionGet(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all")).execute().actionGet(); ensureGreen(); countResponse = client().prepareSearch().setSize(0).get(); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 01c76b465a9..f0650a1cbda 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.indices.flush.SyncedFlushUtil; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -320,14 +319,13 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { assertThat(state.metaData().index("test").getAliases().get("test_alias").filter(), notNullValue()); } - @TestLogging("gateway:TRACE,indices.recovery:TRACE,index.engine:TRACE") public void testReusePeerRecovery() throws Exception { final Settings settings = settingsBuilder() .put("action.admin.cluster.node.shutdown.delay", "10ms") .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) .put("gateway.recover_after_nodes", 4) - - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CONCURRENT_RECOVERIES, 4) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, 4) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, 4) .put(MockFSDirectoryService.CRASH_INDEX, false).build(); internalCluster().startNodesAsync(4, settings).get(); @@ -360,7 +358,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { // Disable allocations while we are closing nodes client().admin().cluster().prepareUpdateSettings() .setTransientSettings(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE)) + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)) .get(); logger.info("--> full cluster restart"); internalCluster().fullRestart(); @@ -369,7 +367,7 @@ public class RecoveryFromGatewayIT extends 
ESIntegTestCase { ensureGreen(); } else { logger.info("--> trying to sync flush"); - assertEquals(SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test").failedShards(), 0); + assertEquals(client().admin().indices().prepareSyncedFlush("test").get().failedShards(), 0); assertSyncIdsNotNull(); } @@ -377,7 +375,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { // Disable allocations while we are closing nodes client().admin().cluster().prepareUpdateSettings() .setTransientSettings(settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE)) + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)) .get(); logger.info("--> full cluster restart"); internalCluster().fullRestart(); diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 9a053b36527..0a6ddca5d24 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -27,7 +27,15 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; @@ -43,9 +51,11 @@ import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Before; +import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -275,13 +285,16 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { } private RoutingAllocation onePrimaryOnNode1And1Replica(AllocationDeciders deciders, Settings settings, UnassignedInfo.Reason reason) { + ShardRouting primaryShard = TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT).put(settings)).numberOfShards(1).numberOfReplicas(0)) - .build(); + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT).put(settings)) + .numberOfShards(1).numberOfReplicas(1) + .putActiveAllocationIds(0, new HashSet<>(Arrays.asList(primaryShard.allocationId().getId())))) + .build(); RoutingTable routingTable = RoutingTable.builder() .add(IndexRoutingTable.builder(shardId.getIndex()) 
.addIndexShard(new IndexShardRoutingTable.Builder(shardId) - .addShard(TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10)) + .addShard(primaryShard) .addShard(ShardRouting.newUnassigned(shardId.getIndex(), shardId.getId(), null, false, new UnassignedInfo(reason, null))) .build()) ) @@ -294,13 +307,16 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { } private RoutingAllocation onePrimaryOnNode1And1ReplicaRecovering(AllocationDeciders deciders) { + ShardRouting primaryShard = TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) + .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)) + .numberOfShards(1).numberOfReplicas(1) + .putActiveAllocationIds(0, new HashSet<>(Arrays.asList(primaryShard.allocationId().getId())))) .build(); RoutingTable routingTable = RoutingTable.builder() .add(IndexRoutingTable.builder(shardId.getIndex()) .addIndexShard(new IndexShardRoutingTable.Builder(shardId) - .addShard(TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10)) + .addShard(primaryShard) .addShard(TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node2.id(), null, null, false, ShardRoutingState.INITIALIZING, 10, new UnassignedInfo(UnassignedInfo.Reason.CLUSTER_RECOVERED, null))) .build()) ) diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java new file mode 100644 index 00000000000..aca3906d185 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java @@ -0,0 +1,159 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.gateway; + +import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; +import org.elasticsearch.action.admin.indices.stats.IndexStats; +import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.indices.recovery.RecoveryState; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.test.ESIntegTestCase.client; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThat; + +/** + * Test of file reuse on recovery shared between integration tests and backwards + * compatibility tests. + */ +public class ReusePeerRecoverySharedTest { + /** + * Test file reuse on peer recovery. This is shared between RecoveryFromGatewayIT + * and RecoveryBackwardsCompatibilityIT. + * + * @param indexSettings + * settings for the index to test + * @param restartCluster + * runnable that will restart the cluster under test + * @param logger + * logger for logging + * @param useSyncIds + * should this use synced flush? synced flush can't be used in the + * bwc tests + */ + public static void testCase(Settings indexSettings, Runnable restartCluster, ESLogger logger, boolean useSyncIds) { + /* + * Prevent any rebalance actions during the peer recovery; if we run into + * a relocation the reuse count will be 0 and that fails the test. We + * are testing here whether we reuse the files on disk after full restarts + * for replicas.
+ */ + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put(indexSettings) + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE))); + client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); + logger.info("--> indexing docs"); + for (int i = 0; i < 1000; i++) { + client().prepareIndex("test", "type").setSource("field", "value").execute().actionGet(); + if ((i % 200) == 0) { + client().admin().indices().prepareFlush().execute().actionGet(); + } + } + if (randomBoolean()) { + client().admin().indices().prepareFlush().execute().actionGet(); + } + logger.info("--> running cluster health"); + client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); + // just wait for merges + client().admin().indices().prepareForceMerge("test").setMaxNumSegments(100).get(); + client().admin().indices().prepareFlush().setWaitIfOngoing(true).setForce(true).get(); + + if (useSyncIds == false) { + logger.info("--> disabling allocation while the cluster is shut down"); + + // Disable allocations while we are closing nodes + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)).get(); + logger.info("--> full cluster restart"); + restartCluster.run(); + + logger.info("--> waiting for cluster to return to green after first shutdown"); + client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); + } else { + logger.info("--> trying to sync flush"); + assertEquals(client().admin().indices().prepareSyncedFlush("test").get().failedShards(), 0); + assertSyncIdsNotNull(); + } + + logger.info("--> disabling allocation while the cluster is shut down{}", useSyncIds ? "" : " a second time"); + // Disable allocations while we are closing nodes + client().admin().cluster().prepareUpdateSettings().setTransientSettings( + settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)) + .get(); + logger.info("--> full cluster restart"); + restartCluster.run(); + + logger.info("--> waiting for cluster to return to green after {}shutdown", useSyncIds ?
"" : "second "); + client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); + + if (useSyncIds) { + assertSyncIdsNotNull(); + } + RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").get(); + for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) { + long recovered = 0; + for (RecoveryState.File file : recoveryState.getIndex().fileDetails()) { + if (file.name().startsWith("segments")) { + recovered += file.length(); + } + } + if (!recoveryState.getPrimary() && (useSyncIds == false)) { + logger.info("--> replica shard {} recovered from {} to {}, recovered {}, reuse {}", recoveryState.getShardId().getId(), + recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(), + recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes()); + assertThat("no bytes should be recovered", recoveryState.getIndex().recoveredBytes(), equalTo(recovered)); + assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0l)); + // we have to recover the segments file since we commit the translog ID on engine startup + assertThat("all bytes should be reused except of the segments file", recoveryState.getIndex().reusedBytes(), + equalTo(recoveryState.getIndex().totalBytes() - recovered)); + assertThat("no files should be recovered except of the segments file", recoveryState.getIndex().recoveredFileCount(), + equalTo(1)); + assertThat("all files should be reused except of the segments file", recoveryState.getIndex().reusedFileCount(), + equalTo(recoveryState.getIndex().totalFileCount() - 1)); + assertThat("> 0 files should be reused", recoveryState.getIndex().reusedFileCount(), greaterThan(0)); + } else { + if (useSyncIds && !recoveryState.getPrimary()) { + logger.info("--> replica shard {} recovered from {} to {} using sync id, recovered {}, reuse {}", + recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(), + recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes()); + } + assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0l)); + assertThat(recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes())); + assertThat(recoveryState.getIndex().recoveredFileCount(), equalTo(0)); + assertThat(recoveryState.getIndex().reusedFileCount(), equalTo(recoveryState.getIndex().totalFileCount())); + } + } + } + + public static void assertSyncIdsNotNull() { + IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test"); + for (ShardStats shardStats : indexStats.getShards()) { + assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index f41f4adc74e..cce4c0d22c4 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -242,25 +242,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getResponses()[0].getResponse().getField("field").getValues().get(0).toString(), equalTo("value1")); } - public void testRealtimeGetWithCompressBackcompat() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1).put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)) - 
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("_source").field("compress", true).endObject().endObject().endObject())); - ensureGreen(); - - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < 10000; i++) { - sb.append((char) i); - } - String fieldValue = sb.toString(); - client().prepareIndex("test", "type", "1").setSource("field", fieldValue).get(); - - // realtime get - GetResponse getResponse = client().prepareGet("test", "type", "1").get(); - assertThat(getResponse.isExists(), equalTo(true)); - assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo(fieldValue)); - } - public void testGetDocWithMultivaluedFields() throws Exception { String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") @@ -874,7 +855,7 @@ public class GetActionIT extends ESIntegTestCase { public void testUngeneratedFieldsThatAreNeverStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + @@ -913,7 +894,7 @@ public class GetActionIT extends ESIntegTestCase { public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + @@ -948,67 +929,10 @@ public class GetActionIT extends ESIntegTestCase { assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1"); } - public void testUngeneratedFieldsPartOfSourceUnstoredSourceDisabledBackcompat() throws IOException { - indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(false, false); - String[] fieldsList = {}; - // before refresh - document is only in translog - assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); - refresh(); - //after refresh - document is in translog and also indexed - assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); - flush(); - //after flush - document is in not anymore translog - only indexed - assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); - } - - public void testUngeneratedFieldsPartOfSourceEitherStoredOrSourceEnabledBackcompat() throws IOException { - boolean stored = randomBoolean(); - boolean sourceEnabled = true; - if (stored) { - sourceEnabled = randomBoolean(); - } - indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(stored, sourceEnabled); - String[] fieldsList = {}; - // before refresh - document is only in translog - assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); - refresh(); - //after refresh - document is in translog and also indexed - assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); - flush(); - //after flush - document is in not anymore translog - only indexed - assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); - } - - void indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(boolean stored, boolean sourceEnabled) { - String storedString = stored ? 
"yes" : "no"; - String createIndexSource = "{\n" + - " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + - " \"refresh_interval\": \"-1\",\n" + - " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + - " },\n" + - " \"mappings\": {\n" + - " \"doc\": {\n" + - " \"_source\": {\n" + - " \"enabled\": " + sourceEnabled + "\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource)); - ensureGreen(); - String doc = "{\n" + - " \"my_boost\": 5.0,\n" + - " \"_ttl\": \"1h\"\n" + - "}\n"; - - client().prepareIndex("test", "doc").setId("1").setSource(doc).setRouting("1").get(); - } - public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + @@ -1074,7 +998,7 @@ public class GetActionIT extends ESIntegTestCase { String storedString = stored ? "yes" : "no"; String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\",\n" + " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + " },\n" + @@ -1126,7 +1050,7 @@ public class GetActionIT extends ESIntegTestCase { String storedString = stored ? "yes" : "no"; String createIndexSource = "{\n" + " \"settings\": {\n" + - " \"index.translog.disable_flush\": true,\n" + + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + " \"refresh_interval\": \"-1\",\n" + " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + " },\n" + diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java index e58df29aa3e..139e1a0647d 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java @@ -18,16 +18,28 @@ */ package org.elasticsearch.http.netty; -import java.nio.charset.StandardCharsets; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.jboss.netty.bootstrap.ClientBootstrap; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ChannelPipeline; +import org.jboss.netty.channel.ChannelPipelineFactory; +import org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.ExceptionEvent; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; -import org.jboss.netty.handler.codec.http.*; +import org.jboss.netty.handler.codec.http.DefaultHttpRequest; +import org.jboss.netty.handler.codec.http.HttpChunkAggregator; +import org.jboss.netty.handler.codec.http.HttpClientCodec; +import org.jboss.netty.handler.codec.http.HttpMethod; +import org.jboss.netty.handler.codec.http.HttpRequest; +import org.jboss.netty.handler.codec.http.HttpResponse; import java.io.Closeable; import java.net.SocketAddress; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; 
diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index a83b638aa9a..50ac44bb202 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -22,7 +22,11 @@ import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.*; +import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.TermStatistics; +import org.apache.lucene.search.Weight; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.Version; diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 3f97fe402fa..316badf376b 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -158,4 +159,22 @@ public class IndexSettingsTests extends ESTestCase { } + public void testUpdateDurability() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "async") + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + assertEquals(Translog.Durability.ASYNC, settings.getTranslogDurability()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "request").build())); + assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); // test default + } + + } diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index e214cea9cc1..8333080dac5 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -34,7 +34,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShadowIndexShard; import 
org.elasticsearch.index.translog.TranslogStats; @@ -180,7 +181,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { Settings idxSettings = Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) .put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString()) .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) diff --git a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java index 21ecdf710b7..1f083466896 100644 --- a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java @@ -19,15 +19,14 @@ package org.elasticsearch.index; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.fd.FaultDetection; import org.elasticsearch.plugins.Plugin; @@ -56,7 +55,7 @@ public class TransportIndexFailuresIT extends ESIntegTestCase { .put("discovery.type", "zen") // <-- To override the local setting if set externally .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly .put(FaultDetection.SETTING_PING_RETRIES, "1") // <-- for hitting simulated network failures quickly - .put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put("discovery.zen.minimum_master_nodes", 1) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index 7cd16e350a4..f844d9ac7a6 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -53,7 +53,10 @@ import java.util.Collections; import java.util.Set; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; /** * diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java index cd5138b4e0c..f467aa289f8 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java @@ -36,7 +36,11 @@ import org.elasticsearch.test.IndexSettingsModule; import 
org.elasticsearch.test.VersionUtils; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.instanceOf; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java index e685c21422b..a097d55f4a3 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java @@ -42,7 +42,9 @@ import java.util.Collections; import java.util.List; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.instanceOf; /** */ diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java index d931b478f3e..6da1a7721a4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java @@ -22,7 +22,12 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.ngram.*; +import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; +import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; +import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter; +import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenizer; +import org.apache.lucene.analysis.ngram.Lucene43NGramTokenizer; +import org.apache.lucene.analysis.ngram.NGramTokenizer; import org.apache.lucene.analysis.reverse.ReverseStringFilter; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java index 6fa2e21fbd1..ab0a24d9dd8 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java @@ -19,14 +19,14 @@ package org.elasticsearch.index.analysis; * under the License. */ -import java.io.IOException; -import java.util.Arrays; -import java.util.regex.Pattern; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.elasticsearch.test.ESTokenStreamTestCase; +import java.io.IOException; +import java.util.Arrays; +import java.util.regex.Pattern; + /** * Verifies the behavior of PatternAnalyzer. 
*/ @@ -38,13 +38,13 @@ public class PatternAnalyzerTests extends ESTokenStreamTestCase { public void testNonWordPattern() throws IOException { // Split on non-letter pattern, do not lowercase, no stopwords PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\W+"), false, null); - assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.", + assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "The", "quick", "brown", "Fox", "the", "abcd1234", "56", "78", "dc" }); // split on non-letter pattern, lowercase, english stopwords - PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\W+"), true, + PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\W+"), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET); - assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.", + assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "quick", "brown", "fox", "abcd1234", "56", "78", "dc" }); } @@ -55,13 +55,13 @@ public class PatternAnalyzerTests extends ESTokenStreamTestCase { public void testWhitespacePattern() throws IOException { // Split on whitespace patterns, do not lowercase, no stopwords PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); - assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.", + assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "The", "quick", "brown", "Fox,the", "abcd1234", "(56.78)", "dc." }); // Split on whitespace patterns, lowercase, english stopwords - PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\s+"), true, + PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\s+"), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET); - assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.", + assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "quick", "brown", "fox,the", "abcd1234", "(56.78)", "dc." 
}); } @@ -72,13 +72,13 @@ public class PatternAnalyzerTests extends ESTokenStreamTestCase { public void testCustomPattern() throws IOException { // Split on comma, do not lowercase, no stopwords PatternAnalyzer a = new PatternAnalyzer(Pattern.compile(","), false, null); - assertAnalyzesTo(a, "Here,Are,some,Comma,separated,words,", + assertAnalyzesTo(a, "Here,Are,some,Comma,separated,words,", new String[] { "Here", "Are", "some", "Comma", "separated", "words" }); // split on comma, lowercase, english stopwords PatternAnalyzer b = new PatternAnalyzer(Pattern.compile(","), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET); - assertAnalyzesTo(b, "Here,Are,some,Comma,separated,words,", + assertAnalyzesTo(b, "Here,Are,some,Comma,separated,words,", new String[] { "here", "some", "comma", "separated", "words" }); } @@ -102,10 +102,10 @@ public class PatternAnalyzerTests extends ESTokenStreamTestCase { // Split on whitespace patterns, do not lowercase, no stopwords PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); - assertAnalyzesTo(a, document.toString(), + assertAnalyzesTo(a, document.toString(), new String[] { new String(largeWord), new String(largeWord2) }); } - + /** blast some random strings through the analyzer */ public void testRandomStrings() throws Exception { Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index fecb7e9b880..297cab86f5e 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; @@ -154,7 +155,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").field("analyzer", analyzerName).endObject().endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper fieldMapper = docMapper.mappers().getMapper("field"); assertThat(fieldMapper.fieldType().searchAnalyzer(), instanceOf(NamedAnalyzer.class)); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java index 2e2a45fab6a..186f6ac1cb7 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import 
com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; - import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.StopFilter; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java index aa063a1d37e..1dbd9ac2bd9 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java @@ -26,7 +26,6 @@ import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter; import org.apache.lucene.util.Version; -import org.elasticsearch.common.inject.ProvisionException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.test.ESTokenStreamTestCase; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java index 52730dd2616..f7c346c6570 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java @@ -34,8 +34,6 @@ import java.io.InputStream; import java.io.StringReader; import java.nio.file.Files; import java.nio.file.Path; - -import static org.hamcrest.Matchers.instanceOf; public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { Settings settings = Settings.settingsBuilder() diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 56bf966dd41..d2bf6bebc5c 100644 --- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -23,7 +23,13 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.LogByteSizeMergePolicy; +import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.join.BitSetProducer; diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineIT.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineIT.java deleted file mode 100644 index 76c07edcb0d..00000000000 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineIT.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.engine; - -import org.elasticsearch.action.admin.indices.segments.IndexSegments; -import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; -import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; -import org.elasticsearch.action.admin.indices.segments.ShardSegments; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESIntegTestCase; -import org.hamcrest.Matchers; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -public class InternalEngineIT extends ESIntegTestCase { - public void testSetIndexCompoundOnFlush() { - client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("number_of_replicas", 0).put("number_of_shards", 1)).get(); - ensureGreen(); - client().prepareIndex("test", "foo").setSource("field", "foo").get(); - refresh(); - assertTotalCompoundSegments(1, 1, "test"); - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false)).get(); - client().prepareIndex("test", "foo").setSource("field", "foo").get(); - refresh(); - assertTotalCompoundSegments(1, 2, "test"); - - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true)).get(); - client().prepareIndex("test", "foo").setSource("field", "foo").get(); - refresh(); - assertTotalCompoundSegments(2, 3, "test"); - } - - private void assertTotalCompoundSegments(int i, int t, String index) { - IndicesSegmentResponse indicesSegmentResponse = client().admin().indices().prepareSegments(index).get(); - assertNotNull("indices segments response should contain indices", indicesSegmentResponse.getIndices()); - IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(index); - assertNotNull(indexSegments); - assertNotNull(indexSegments.getShards()); - Collection values = indexSegments.getShards().values(); - int compounds = 0; - int total = 0; - for (IndexShardSegments indexShardSegments : values) { - for (ShardSegments s : indexShardSegments) { - for (Segment segment : s) { - if (segment.isSearch() && segment.getNumDocs() > 0) { - if (segment.isCompound()) { - compounds++; - } - total++; - } - } - } - } - assertThat(compounds, Matchers.equalTo(i)); - assertThat(total, Matchers.equalTo(t)); - } - - private Set segments(IndexSegments segments) { - Set segmentSet = new HashSet<>(); - for (IndexShardSegments s : segments) { - for (ShardSegments shardSegments : s) { - segmentSet.addAll(shardSegments.getSegments()); - } - } - return segmentSet; - } -} diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java index 1d4eb6159e7..8487da8280a 100644 --- 
a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java @@ -36,10 +36,6 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { // INDEX_COMPOUND_ON_FLUSH InternalEngine engine = ((InternalEngine) EngineAccess.engine(service.getShardOrNull(0))); assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(true)); - client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false).build()).get(); - assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(false)); - client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true).build()).get(); - assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(true)); // VERSION MAP SIZE @@ -54,15 +50,13 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { long gcDeletes = random().nextLong() & (Long.MAX_VALUE >> 11); Settings build = Settings.builder() - .put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, compoundOnFlush) .put(EngineConfig.INDEX_GC_DELETES_SETTING, gcDeletes, TimeUnit.MILLISECONDS) .build(); assertEquals(gcDeletes, build.getAsTime(EngineConfig.INDEX_GC_DELETES_SETTING, null).millis()); client().admin().indices().prepareUpdateSettings("foo").setSettings(build).get(); LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig(); - assertEquals(engine.config().isCompoundOnFlush(), compoundOnFlush); - assertEquals(currentIndexWriterConfig.getUseCompoundFile(), compoundOnFlush); + assertEquals(currentIndexWriterConfig.getUseCompoundFile(), true); assertEquals(engine.config().getGcDeletesInMillis(), gcDeletes); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index ddbde9da14c..d176a01b61a 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -28,7 +28,17 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; +import org.apache.lucene.index.LiveIndexWriterConfig; +import org.apache.lucene.index.LogByteSizeMergePolicy; +import org.apache.lucene.index.LogDocMergePolicy; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; @@ -62,13 +72,25 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.engine.Engine.Searcher; import org.elasticsearch.index.indexing.ShardIndexingService; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.DocumentMapper; 
+import org.elasticsearch.index.mapper.DocumentMapperForType;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
+import org.elasticsearch.index.mapper.MapperBuilders;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.Mapping;
+import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.ParseContext.Document;
+import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.mapper.object.RootObjectMapper;
-import org.elasticsearch.index.shard.*;
+import org.elasticsearch.index.shard.IndexSearcherWrapper;
+import org.elasticsearch.index.shard.MergeSchedulerConfig;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardUtils;
+import org.elasticsearch.index.shard.TranslogRecoveryPerformer;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.index.store.DirectoryService;
 import org.elasticsearch.index.store.DirectoryUtils;
@@ -92,7 +114,12 @@ import java.nio.charset.Charset;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
 import java.util.concurrent.BrokenBarrierException;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.CyclicBarrier;
@@ -103,7 +130,12 @@ import java.util.concurrent.atomic.AtomicReference;
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY;
 import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 public class InternalEngineTests extends ESTestCase {
 
@@ -138,7 +170,6 @@ public class InternalEngineTests extends ESTestCase {
             codecName = "default";
         }
         defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder()
-                .put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, randomBoolean())
                 .put(EngineConfig.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us
                 .put(EngineConfig.INDEX_CODEC_SETTING, codecName)
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -222,7 +253,7 @@
     }
 
     protected Translog createTranslog(Path translogPath) throws IOException {
-        TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, INDEX_SETTINGS, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
+        TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE);
         return new Translog(translogConfig);
     }
 
@@ -240,7 +271,7 @@
     public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy) {
         IndexWriterConfig iwc = newIndexWriterConfig();
-        TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
+        TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE);
         EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, INDEX_SETTINGS), indexSettings
                 , null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig,
@@ -269,7 +300,6 @@
         assertThat(segments.isEmpty(), equalTo(true));
         assertThat(engine.segmentsStats().getCount(), equalTo(0l));
         assertThat(engine.segmentsStats().getMemoryInBytes(), equalTo(0l));
-        final boolean defaultCompound = defaultSettings.getSettings().getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true);
 
         // create a doc and refresh
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
@@ -292,7 +322,7 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
         assertThat(segments.get(0).ramTree, nullValue());
 
         engine.flush();
@@ -304,10 +334,7 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
-
-        engine.config().setCompoundOnFlush(false);
-        engine.onSettingsChanged();
+        assertThat(segments.get(0).isCompound(), equalTo(true));
 
         ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
         engine.index(new Engine.Index(newUid("3"), doc3));
@@ -326,14 +353,14 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
 
         assertThat(segments.get(1).isCommitted(), equalTo(false));
         assertThat(segments.get(1).isSearch(), equalTo(true));
         assertThat(segments.get(1).getNumDocs(), equalTo(1));
         assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(1).isCompound(), equalTo(false));
+        assertThat(segments.get(1).isCompound(), equalTo(true));
 
         engine.delete(new Engine.Delete("test", "1", newUid("1")));
@@ -347,15 +374,14 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(1));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
 
         assertThat(segments.get(1).isCommitted(), equalTo(false));
         assertThat(segments.get(1).isSearch(), equalTo(true));
         assertThat(segments.get(1).getNumDocs(), equalTo(1));
         assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(1).isCompound(), equalTo(false));
+        assertThat(segments.get(1).isCompound(), equalTo(true));
 
-        engine.config().setCompoundOnFlush(true);
         engine.onSettingsChanged();
         ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
         engine.index(new Engine.Index(newUid("4"), doc4));
@@ -369,13 +395,13 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(1));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
 
         assertThat(segments.get(1).isCommitted(), equalTo(false));
         assertThat(segments.get(1).isSearch(), equalTo(true));
         assertThat(segments.get(1).getNumDocs(), equalTo(1));
         assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(1).isCompound(), equalTo(false));
+        assertThat(segments.get(1).isCompound(), equalTo(true));
 
         assertThat(segments.get(2).isCommitted(), equalTo(false));
         assertThat(segments.get(2).isSearch(), equalTo(true));
@@ -1868,9 +1894,8 @@
             SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
             MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry();
             MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry);
-            DocumentMapper.Builder b = new DocumentMapper.Builder(settings, rootBuilder, mapperService);
-            DocumentMapperParser parser = mapperService.documentMapperParser();
-            this.docMapper = b.build(mapperService, parser);
+            DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService);
+            this.docMapper = b.build(mapperService);
         }
 
         @Override
@@ -1906,14 +1931,14 @@
         Translog.TranslogGeneration generation = engine.getTranslog().getGeneration();
         engine.close();
 
-        Translog translog = new Translog(new TranslogConfig(shardId, createTempDir(), INDEX_SETTINGS, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool));
+        Translog translog = new Translog(new TranslogConfig(shardId, createTempDir(), INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE));
         translog.add(new Translog.Index("test", "SomeBogusId", "{}".getBytes(Charset.forName("UTF-8"))));
         assertEquals(generation.translogFileGeneration, translog.currentFileGeneration());
         translog.close();
 
         EngineConfig config = engine.config();
         /* create a TranslogConfig that has been created with a different UUID */
-        TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
+        TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE);
         EngineConfig brokenConfig = new EngineConfig(shardId, threadPool, config.getIndexingService(), config.getIndexSettings()
                 , null, store, createSnapshotDeletionPolicy(), newMergePolicy(), config.getMergeSchedulerConfig(),
diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
index 3fe7a540bf8..214bc343a8a 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
@@ -23,7 +23,13 @@ import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
+import org.apache.lucene.index.LiveIndexWriterConfig;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.index.SnapshotDeletionPolicy;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
@@ -71,7 +77,12 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  * TODO: document me!
@@ -107,7 +118,6 @@ public class ShadowEngineTests extends ESTestCase {
             codecName = "default";
         }
         defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder()
-                .put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, randomBoolean())
                 .put(EngineConfig.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us
                 .put(EngineConfig.INDEX_CODEC_SETTING, codecName)
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -213,7 +223,7 @@
     public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy) {
         IndexWriterConfig iwc = newIndexWriterConfig();
-        TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
+        TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE);
         EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, indexSettings), indexSettings
                 , null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig, iwc.getAnalyzer(), iwc.getSimilarity()
                 , new CodecService(null, logger), new Engine.EventListener() {
@@ -269,7 +279,6 @@
         assertThat(segments.isEmpty(), equalTo(true));
         assertThat(primaryEngine.segmentsStats().getCount(), equalTo(0l));
         assertThat(primaryEngine.segmentsStats().getMemoryInBytes(), equalTo(0l));
-        final boolean defaultCompound = defaultSettings.getSettings().getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true);
 
         // create a doc and refresh
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
@@ -292,7 +301,7 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertTrue(segments.get(0).isCompound());
         assertThat(segments.get(0).ramTree, nullValue());
 
         // Check that the replica sees nothing
@@ -320,7 +329,7 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
 
         segments = replicaEngine.segments(false);
         assertThat(segments.size(), equalTo(1));
@@ -329,12 +338,9 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
 
-        primaryEngine.config().setCompoundOnFlush(false);
-        primaryEngine.onSettingsChanged();
-
         ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
         primaryEngine.index(new Engine.Index(newUid("3"), doc3));
         primaryEngine.refresh("test");
@@ -352,12 +358,12 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
         assertThat(segments.get(1).isCommitted(), equalTo(false));
         assertThat(segments.get(1).isSearch(), equalTo(true));
         assertThat(segments.get(1).getNumDocs(), equalTo(1));
         assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(1).isCompound(), equalTo(false));
+        assertThat(segments.get(1).isCompound(), equalTo(true));
 
         // Make visible to shadow replica
         primaryEngine.flush();
@@ -376,12 +382,12 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
         assertThat(segments.get(1).isCommitted(), equalTo(true));
         assertThat(segments.get(1).isSearch(), equalTo(true));
         assertThat(segments.get(1).getNumDocs(), equalTo(1));
         assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(1).isCompound(), equalTo(false));
+        assertThat(segments.get(1).isCompound(), equalTo(true));
 
         primaryEngine.delete(new Engine.Delete("test", "1", newUid("1")));
         primaryEngine.refresh("test");
@@ -394,20 +400,17 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(1));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
         assertThat(segments.get(1).isCommitted(), equalTo(true));
         assertThat(segments.get(1).isSearch(), equalTo(true));
         assertThat(segments.get(1).getNumDocs(), equalTo(1));
         assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(1).isCompound(), equalTo(false));
+        assertThat(segments.get(1).isCompound(), equalTo(true));
 
         // Make visible to shadow replica
         primaryEngine.flush();
         replicaEngine.refresh("test");
 
-        primaryEngine.config().setCompoundOnFlush(true);
-        primaryEngine.onSettingsChanged();
-
         ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
         primaryEngine.index(new Engine.Index(newUid("4"), doc4));
         primaryEngine.refresh("test");
@@ -420,13 +423,13 @@
         assertThat(segments.get(0).isSearch(), equalTo(true));
         assertThat(segments.get(0).getNumDocs(), equalTo(1));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
-        assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).isCompound(), equalTo(true));
         assertThat(segments.get(1).isCommitted(), equalTo(true));
         assertThat(segments.get(1).isSearch(), equalTo(true));
         assertThat(segments.get(1).getNumDocs(), equalTo(1));
         assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
-        assertThat(segments.get(1).isCompound(), equalTo(false));
+        assertThat(segments.get(1).isCompound(), equalTo(true));
 
         assertThat(segments.get(2).isCommitted(), equalTo(false));
         assertThat(segments.get(2).isSearch(), equalTo(true));
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
index bed9b480c01..70e3b66553c 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
@@ -23,7 +23,13 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.LogByteSizeMergePolicy;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.RAMDirectory;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java
index 741ef3804b1..87cf5e1c570 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java
@@ -24,13 +24,14 @@ import org.apache.lucene.document.GeoPointField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.util.GeoUtils;
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.test.VersionUtils;
 
 import static org.elasticsearch.test.geo.RandomShapeGenerator.randomPoint;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
 /**
  *
@@ -89,4 +90,4 @@ public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImpl
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
index b1f9d73de73..024a90ce7ba 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
@@ -20,7 +20,6 @@ package org.elasticsearch.index.fielddata;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Store;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
index 73fdd79b108..ca207fbdc2c 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
@@ -20,9 +20,9 @@ package org.elasticsearch.index.fielddata;
 
 import com.carrotsearch.hppc.ObjectArrayList;
-
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -51,7 +51,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
+        final DocumentMapper mapper = mapperService.documentMapperParser().parse("test", new CompressedXContent(mapping));
 
         ObjectArrayList bytesList1 = new ObjectArrayList<>(2);
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
index 6c4673bbd0b..26ea97dbf15 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.index.fielddata;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.SortedSetDocValuesField;
@@ -33,6 +32,7 @@ import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.English;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.settings.Settings;
@@ -75,7 +75,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
                     .startObject("float").field("type", "float").endObject()
                     .startObject("double").field("type", "double").endObject()
                 .endObject().endObject().endObject().string();
-        final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
+        final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
         Random random = getRandom();
         int atLeast = scaledRandomIntBetween(200, 1500);
         for (int i = 0; i < atLeast; i++) {
@@ -143,7 +143,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
                     .startObject("long").field("type", "long").endObject()
                 .endObject().endObject().endObject().string();
-        final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
+        final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
         Random random = getRandom();
         int atLeast = scaledRandomIntBetween(200, 1500);
         final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 10);
@@ -220,7 +220,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
                     .startObject("double").field("type", "double").endObject()
                 .endObject().endObject().endObject().string();
 
-        final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
+        final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         Random random = getRandom();
         int atLeast = scaledRandomIntBetween(200, 1500);
         final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 10);
@@ -398,7 +398,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
                     .startObject("geopoint").field("type", "geo_point").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                 .endObject().endObject().endObject().string();
 
-        final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
+        final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         Random random = getRandom();
         int atLeast = scaledRandomIntBetween(200, 1500);
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
index 59fc8952a67..13f7f74e37b 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
@@ -23,7 +23,11 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
@@ -86,14 +90,12 @@ public class FieldDataCacheTests extends ESTestCase {
 
     private SortedSetDVOrdinalsIndexFieldData createSortedDV(String fieldName, IndexFieldDataCache indexFieldDataCache) {
         FieldDataType fieldDataType = new StringFieldMapper.StringFieldType().fieldDataType();
-        MappedFieldType.Names names = new MappedFieldType.Names(fieldName);
-        return new SortedSetDVOrdinalsIndexFieldData(createIndexSettings(), indexFieldDataCache, names, new NoneCircuitBreakerService(), fieldDataType);
+        return new SortedSetDVOrdinalsIndexFieldData(createIndexSettings(), indexFieldDataCache, fieldName, new NoneCircuitBreakerService(), fieldDataType);
     }
 
     private PagedBytesIndexFieldData createPagedBytes(String fieldName, IndexFieldDataCache indexFieldDataCache) {
         FieldDataType fieldDataType = new StringFieldMapper.StringFieldType().fieldDataType();
-        MappedFieldType.Names names = new MappedFieldType.Names(fieldName);
-        return new PagedBytesIndexFieldData(createIndexSettings(), names, fieldDataType, indexFieldDataCache, new NoneCircuitBreakerService());
+        return new PagedBytesIndexFieldData(createIndexSettings(), fieldName, fieldDataType, indexFieldDataCache, new NoneCircuitBreakerService());
     }
 
     private IndexSettings createIndexSettings() {
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java
index 21780fdf1fa..943af64363e 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java
@@ -18,7 +18,9 @@
  */
 package org.elasticsearch.index.fielddata;
 
-import org.apache.lucene.document.*;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.elasticsearch.index.fielddata.plain.AbstractAtomicGeoPointFieldData;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
index b2f818c89f0..3d4f63daa33 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
@@ -23,7 +23,12 @@ import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@@ -36,10 +41,16 @@ import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData;
 import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.mapper.MapperBuilders;
-import org.elasticsearch.index.mapper.core.*;
+import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
+import org.elasticsearch.index.mapper.core.ByteFieldMapper;
+import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
+import org.elasticsearch.index.mapper.core.FloatFieldMapper;
+import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
+import org.elasticsearch.index.mapper.core.LongFieldMapper;
+import org.elasticsearch.index.mapper.core.ShortFieldMapper;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
@@ -139,7 +150,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
         final AtomicInteger onRemovalCalled = new AtomicInteger();
         ifdService.setListener(new IndexFieldDataCache.Listener() {
             @Override
-            public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
+            public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
                 if (wrap) {
                     assertEquals(new ShardId("test", 1), shardId);
                 } else {
@@ -149,7 +160,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
             }
 
             @Override
-            public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+            public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
                 if (wrap) {
                     assertEquals(new ShardId("test", 1), shardId);
                 } else {
@@ -177,12 +188,12 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
         try {
             shardPrivateService.setListener(new IndexFieldDataCache.Listener() {
                 @Override
-                public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
+                public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
 
                 }
 
                 @Override
-                public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+                public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
 
                 }
             });
@@ -197,7 +208,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
         try {
             IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null, threadPool);
             IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY), cache, null, null);
-            ft.setNames(new Names("some_long"));
+            ft.setName("some_long");
             ft.setHasDocValues(true);
             ifds.getForField(ft); // no exception
             ft.setHasDocValues(false);
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
index 230330dbbf5..f1fb694b295 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java
@@ -19,12 +19,10 @@
 
 package org.elasticsearch.index.fielddata;
 
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
-import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.search.MultiValueMode;
 
 /** Returns an implementation based on paged bytes which doesn't implement WithOrdinals in order to visit different paths in the code,
@@ -40,8 +38,8 @@ public class NoOrdinalsStringFieldDataTests extends PagedBytesStringFieldDataTes
     }
 
     @Override
-    public Names getFieldNames() {
-        return in.getFieldNames();
+    public String getFieldName() {
+        return in.getFieldName();
     }
 
     @Override
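The field data hunks above all make the same move: APIs that used to identify a field through MappedFieldType.Names now take the field's full name as a plain String. As a minimal illustrative sketch (only the onCache/onRemoval signatures are taken from the hunks; the wrapper class and method are hypothetical):

    import org.apache.lucene.util.Accountable;
    import org.elasticsearch.index.fielddata.FieldDataType;
    import org.elasticsearch.index.fielddata.IndexFieldDataCache;
    import org.elasticsearch.index.shard.ShardId;

    class FieldDataListenerSketch {
        // A no-op listener under the new contract: fields arrive as plain
        // String full names rather than MappedFieldType.Names objects.
        static IndexFieldDataCache.Listener noopListener() {
            return new IndexFieldDataCache.Listener() {
                @Override
                public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
                    // per-field memory accounting would hook in here
                }

                @Override
                public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
                    // eviction tracking would hook in here
                }
            };
        }
    }
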
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java
index e25b5c73c69..63b66f47d1a 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java
@@ -21,87 +21,91 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.SortedDocValuesField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.test.ESTestCase;
 
 public class ReplaceMissingTests extends ESTestCase {
-    
+
     public void test() throws Exception {
         Directory dir = newDirectory();
         IndexWriterConfig iwc = newIndexWriterConfig(null);
         iwc.setMergePolicy(newLogMergePolicy());
         IndexWriter iw = new IndexWriter(dir, iwc);
-        
+
         Document doc = new Document();
         doc.add(new SortedDocValuesField("field", new BytesRef("cat")));
         iw.addDocument(doc);
-        
+
         doc = new Document();
         iw.addDocument(doc);
-        
+
         doc = new Document();
         doc.add(new SortedDocValuesField("field", new BytesRef("dog")));
         iw.addDocument(doc);
         iw.forceMerge(1);
         iw.close();
-        
+
         DirectoryReader reader = DirectoryReader.open(dir);
         LeafReader ar = getOnlySegmentReader(reader);
         SortedDocValues raw = ar.getSortedDocValues("field");
         assertEquals(2, raw.getValueCount());
-        
+
         // existing values
         SortedDocValues dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("cat"));
         assertEquals(2, dv.getValueCount());
         assertEquals("cat", dv.lookupOrd(0).utf8ToString());
         assertEquals("dog", dv.lookupOrd(1).utf8ToString());
-        
+
         assertEquals(0, dv.getOrd(0));
         assertEquals(0, dv.getOrd(1));
         assertEquals(1, dv.getOrd(2));
-        
+
         dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("dog"));
         assertEquals(2, dv.getValueCount());
         assertEquals("cat", dv.lookupOrd(0).utf8ToString());
         assertEquals("dog", dv.lookupOrd(1).utf8ToString());
-        
+
         assertEquals(0, dv.getOrd(0));
         assertEquals(1, dv.getOrd(1));
         assertEquals(1, dv.getOrd(2));
-        
+
         // non-existing values
         dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("apple"));
         assertEquals(3, dv.getValueCount());
         assertEquals("apple", dv.lookupOrd(0).utf8ToString());
         assertEquals("cat", dv.lookupOrd(1).utf8ToString());
         assertEquals("dog", dv.lookupOrd(2).utf8ToString());
-        
+
         assertEquals(1, dv.getOrd(0));
         assertEquals(0, dv.getOrd(1));
         assertEquals(2, dv.getOrd(2));
-        
+
         dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("company"));
         assertEquals(3, dv.getValueCount());
         assertEquals("cat", dv.lookupOrd(0).utf8ToString());
         assertEquals("company", dv.lookupOrd(1).utf8ToString());
         assertEquals("dog", dv.lookupOrd(2).utf8ToString());
-        
+
         assertEquals(0, dv.getOrd(0));
         assertEquals(1, dv.getOrd(1));
         assertEquals(2, dv.getOrd(2));
-        
+
         dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("ebay"));
         assertEquals(3, dv.getValueCount());
         assertEquals("cat", dv.lookupOrd(0).utf8ToString());
         assertEquals("dog", dv.lookupOrd(1).utf8ToString());
         assertEquals("ebay", dv.lookupOrd(2).utf8ToString());
-        
+
         assertEquals(0, dv.getOrd(0));
         assertEquals(2, dv.getOrd(1));
         assertEquals(1, dv.getOrd(2));
-        
+
         reader.close();
         dir.close();
     }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java
new file mode 100644
index 00000000000..06307614a51
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.util.LuceneTestCase;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Arrays;
+import java.util.List;
+
+public class DocumentFieldMapperTests extends LuceneTestCase {
+
+    private static class FakeAnalyzer extends Analyzer {
+
+        private final String output;
+
+        public FakeAnalyzer(String output) {
+            this.output = output;
+        }
+
+        @Override
+        protected TokenStreamComponents createComponents(String fieldName) {
+            Tokenizer tokenizer = new Tokenizer() {
+                boolean incremented = false;
+                CharTermAttribute term = addAttribute(CharTermAttribute.class);
+
+                @Override
+                public boolean incrementToken() throws IOException {
+                    if (incremented) {
+                        return false;
+                    }
+                    term.setLength(0).append(output);
+                    incremented = true;
+                    return true;
+                }
+            };
+            return new TokenStreamComponents(tokenizer);
+        }
+
+    }
+
+    static class FakeFieldType extends MappedFieldType {
+
+        public FakeFieldType() {
+            super();
+        }
+
+        FakeFieldType(FakeFieldType other) {
+            super(other);
+        }
+
+        @Override
+        public MappedFieldType clone() {
+            return new FakeFieldType(this);
+        }
+
+        @Override
+        public String typeName() {
+            return "fake";
+        }
+
+    }
+
+    static class FakeFieldMapper extends FieldMapper {
+
+        private static final Settings SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
+
+        public FakeFieldMapper(String simpleName, MappedFieldType fieldType) {
+            super(simpleName, fieldType.clone(), fieldType.clone(), SETTINGS, null, null);
+        }
+
+        @Override
+        protected void parseCreateField(ParseContext context, List fields) throws IOException {
+        }
+
+        @Override
+        protected String contentType() {
+            return null;
+        }
+
+    }
+
+    public void testAnalyzers() throws IOException {
+        FakeFieldType fieldType1 = new FakeFieldType();
+        fieldType1.setName("field1");
+        fieldType1.setIndexAnalyzer(new NamedAnalyzer("foo", new FakeAnalyzer("index")));
+        fieldType1.setSearchAnalyzer(new NamedAnalyzer("bar", new FakeAnalyzer("search")));
+        fieldType1.setSearchQuoteAnalyzer(new NamedAnalyzer("baz", new FakeAnalyzer("search_quote")));
+        FieldMapper fieldMapper1 = new FakeFieldMapper("field1", fieldType1);
+
+        FakeFieldType fieldType2 = new FakeFieldType();
+        fieldType2.setName("field2");
+        FieldMapper fieldMapper2 = new FakeFieldMapper("field2", fieldType2);
+
+        Analyzer defaultIndex = new FakeAnalyzer("default_index");
+        Analyzer defaultSearch = new FakeAnalyzer("default_search");
+        Analyzer defaultSearchQuote = new FakeAnalyzer("default_search_quote");
+
+        DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers(Arrays.asList(fieldMapper1, fieldMapper2), defaultIndex, defaultSearch, defaultSearchQuote);
+
+        assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field1", "index");
+        assertAnalyzes(documentFieldMappers.searchAnalyzer(), "field1", "search");
+        assertAnalyzes(documentFieldMappers.searchQuoteAnalyzer(), "field1", "search_quote");
+
+        assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field2", "default_index");
+        assertAnalyzes(documentFieldMappers.searchAnalyzer(), "field2", "default_search");
+        assertAnalyzes(documentFieldMappers.searchQuoteAnalyzer(), "field2", "default_search_quote");
+    }
+
+    private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException {
+        try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) {
+            CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);
+            assertTrue(tok.incrementToken());
+            assertEquals(output, term.toString());
+        }
+    }
+}
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
index fccf642e9df..3206a5e87ae 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper;
 
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -31,7 +32,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .field("enabled", false).endObject().endObject().string();
-        DocumentMapper mapper = mapperParser.parse(mapping);
+        DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
 
         BytesReference bytes = XContentFactory.jsonBuilder()
                 .startObject().startObject("foo")
@@ -48,7 +49,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
                 .startObject("foo").field("enabled", false).endObject()
                 .startObject("bar").field("type", "integer").endObject()
                 .endObject().endObject().endObject().string();
-        DocumentMapper mapper = mapperParser.parse(mapping);
+        DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
 
         BytesReference bytes = XContentFactory.jsonBuilder()
                 .startObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java
new file mode 100644
index 00000000000..f6cfcec041a
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.index.TransportIndexAction;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.AutoCreateIndex;
+import org.elasticsearch.cluster.action.shard.ShardStateAction;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexNotFoundException;
+import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.transport.local.LocalTransport;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.test.cluster.TestClusterService;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import static org.hamcrest.CoreMatchers.instanceOf;
+
+import java.util.Collections;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+public class DynamicMappingDisabledTests extends ESSingleNodeTestCase {
+
+    private static ThreadPool THREAD_POOL;
+    private TestClusterService clusterService;
+    private LocalTransport transport;
+    private TransportService transportService;
+    private IndicesService indicesService;
+    private ShardStateAction shardStateAction;
+    private ActionFilters actionFilters;
+    private IndexNameExpressionResolver indexNameExpressionResolver;
+    private AutoCreateIndex autoCreateIndex;
+    private Settings settings;
+
+    @BeforeClass
+    public static void createThreadPool() {
+        THREAD_POOL = new ThreadPool("DynamicMappingDisabledTests");
+    }
+
+    @Override
+    public void setUp() throws Exception {
+        super.setUp();
+        settings = Settings.builder()
+                .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, false)
+                .build();
+        clusterService = new TestClusterService(THREAD_POOL);
+        transport = new LocalTransport(settings, THREAD_POOL, Version.CURRENT, new NamedWriteableRegistry());
+        transportService = new TransportService(transport, THREAD_POOL);
+        indicesService = getInstanceFromNode(IndicesService.class);
+        shardStateAction = new ShardStateAction(settings, clusterService, transportService, null, null);
+        actionFilters = new ActionFilters(Collections.emptySet());
+        indexNameExpressionResolver = new IndexNameExpressionResolver(settings);
+        autoCreateIndex = new AutoCreateIndex(settings, indexNameExpressionResolver);
+    }
+
+    @AfterClass
+    public static void destroyThreadPool() {
+        ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS);
+        // since the field is static, it must be set to null to be eligible for collection
+        THREAD_POOL = null;
+    }
+
+    public void testDynamicDisabled() {
+        TransportIndexAction action = new TransportIndexAction(settings, transportService, clusterService,
+                indicesService, THREAD_POOL, shardStateAction, null, null, actionFilters, indexNameExpressionResolver,
+                autoCreateIndex);
+
+        IndexRequest request = new IndexRequest("index", "type", "1");
+        request.source("foo", 3);
+        final AtomicBoolean onFailureCalled = new AtomicBoolean();
+
+        action.execute(request, new ActionListener() {
+            @Override
+            public void onResponse(IndexResponse indexResponse) {
+                fail("Indexing request should have failed");
+            }
+
+            @Override
+            public void onFailure(Throwable e) {
+                onFailureCalled.set(true);
+                assertThat(e, instanceOf(IndexNotFoundException.class));
+                assertEquals(e.getMessage(), "no such index");
+            }
+        });
+
+        assertTrue(onFailureCalled.get());
+    }
+}
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index 966ea01e95c..22a10ab8229 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -18,10 +18,12 @@
  */
 package org.elasticsearch.index.mapper;
 
+import org.apache.lucene.index.IndexOptions;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -29,16 +31,17 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.mapper.core.DateFieldMapper;
+import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
 import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
 import org.elasticsearch.index.mapper.core.FloatFieldMapper;
 import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
+import org.elasticsearch.index.mapper.core.LongFieldMapper;
+import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
 
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@@ -56,7 +59,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
                 .startObject()
@@ -76,7 +79,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
                 .startObject()
@@ -97,7 +100,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         try {
             defaultMapper.parse("test", "type", "1", jsonBuilder()
@@ -132,7 +135,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
                 .startObject().startObject("obj1")
@@ -155,7 +158,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         try {
             defaultMapper.parse("test", "type", "1", jsonBuilder()
@@ -218,7 +221,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
                 .endObject().string();
 
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
 
         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
         // foo is already defined in the mappings
         assertNull(update);
@@ -231,7 +234,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .startObject("type").endObject()
                 .endObject().string();
 
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
 
         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
@@ -251,7 +254,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
                 .endObject().string();
 
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
 
         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
@@ -272,7 +275,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .startObject("type").endObject()
                 .endObject().string();
 
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
 
         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
@@ -293,7 +296,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .startObject("type").endObject()
                 .endObject().string();
 
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
 
         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
@@ -313,7 +316,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .startObject("type").endObject()
                 .endObject().string();
 
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
 
         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo").value("bar").value("baz").endArray().endObject());
@@ -333,7 +336,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .startObject("foo").field("type", "object").endObject()
                 .endObject().endObject().endObject().string();
 
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
 
         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
@@ -353,7 +356,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
                 .startObject("type").endObject()
                 .endObject().string();
 
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
 
         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo")
@@ -370,17 +373,52 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
     }
 
     public void testReuseExistingMappings() throws IOException, Exception {
-        IndexService indexService = createIndex("test", Settings.EMPTY, "type", "my_field1", "type=string,store=yes", "my_field2", "type=integer,precision_step=10");
+        IndexService indexService = createIndex("test", Settings.EMPTY, "type",
+                "my_field1", "type=string,store=yes",
+                "my_field2", "type=integer,precision_step=10",
+                "my_field3", "type=long,doc_values=false",
+                "my_field4", "type=float,index_options=freqs",
+                "my_field5", "type=double,precision_step=14",
+                "my_field6", "type=date,doc_values=false");
 
         // Even if the dynamic type of our new field is long, we already have a mapping for the same field
         // of type string so it should be mapped as a string
         DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type2").getDocumentMapper();
         Mapper update = parse(newMapper, indexService.mapperService().documentMapperParser(),
-                XContentFactory.jsonBuilder().startObject().field("my_field1", 42).endObject());
+                XContentFactory.jsonBuilder().startObject()
+                    .field("my_field1", 42)
+                    .field("my_field2", 43)
+                    .field("my_field3", 44)
+                    .field("my_field4", 45)
+                    .field("my_field5", 46)
+                    .field("my_field6", 47)
+                .endObject());
         Mapper myField1Mapper = null;
+        Mapper myField2Mapper = null;
+        Mapper myField3Mapper = null;
+        Mapper myField4Mapper = null;
+        Mapper myField5Mapper = null;
+        Mapper myField6Mapper = null;
         for (Mapper m : update) {
-            if (m.name().equals("my_field1")) {
+            switch (m.name()) {
+                case "my_field1":
                     myField1Mapper = m;
+                    break;
+                case "my_field2":
+                    myField2Mapper = m;
+                    break;
+                case "my_field3":
+                    myField3Mapper = m;
+                    break;
+                case "my_field4":
+                    myField4Mapper = m;
+                    break;
+                case "my_field5":
+                    myField5Mapper = m;
+                    break;
+                case "my_field6":
+                    myField6Mapper = m;
+                    break;
             }
         }
         assertNotNull(myField1Mapper);
@@ -391,20 +429,28 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
 
         // Even if dynamic mappings would map a numeric field as a long, here it should map it as a integer
         // since we already have a mapping of type integer
-        update = parse(newMapper, indexService.mapperService().documentMapperParser(),
-                XContentFactory.jsonBuilder().startObject().field("my_field2", 42).endObject());
-        Mapper myField2Mapper = null;
-        for (Mapper m : update) {
-            if (m.name().equals("my_field2")) {
-                myField2Mapper = m;
-            }
-        }
         assertNotNull(myField2Mapper);
         // same type
         assertTrue(myField2Mapper instanceof IntegerFieldMapper);
         // and same option
         assertEquals(10, ((IntegerFieldMapper) myField2Mapper).fieldType().numericPrecisionStep());
 
+        assertNotNull(myField3Mapper);
+        assertTrue(myField3Mapper instanceof LongFieldMapper);
+        assertFalse(((LongFieldType) ((LongFieldMapper) myField3Mapper).fieldType()).hasDocValues());
+
+        assertNotNull(myField4Mapper);
+        assertTrue(myField4Mapper instanceof FloatFieldMapper);
+        assertEquals(IndexOptions.DOCS_AND_FREQS, ((FieldMapper) myField4Mapper).fieldType().indexOptions());
+
+        assertNotNull(myField5Mapper);
+        assertTrue(myField5Mapper instanceof DoubleFieldMapper);
+        assertEquals(14, ((DoubleFieldMapper) myField5Mapper).fieldType().numericPrecisionStep());
+
+        assertNotNull(myField6Mapper);
+        assertTrue(myField6Mapper instanceof DateFieldMapper);
+        assertFalse(((DateFieldType) ((DateFieldMapper) myField6Mapper).fieldType()).hasDocValues());
+
         // This can't work
         try {
             parse(newMapper, indexService.mapperService().documentMapperParser(),
@@ -415,6 +461,54 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         }
     }
 
+    public void testMixTemplateMultiFieldAndMappingReuse() throws Exception {
+        IndexService indexService = createIndex("test");
+        XContentBuilder mappings1 = jsonBuilder().startObject()
+                .startObject("type1")
+                    .startArray("dynamic_templates")
+                        .startObject()
+                            .startObject("template1")
+                                .field("match_mapping_type", "string")
+                                .startObject("mapping")
+                                    .field("type", "string")
+                                    .startObject("fields")
+                                        .startObject("raw")
+                                            .field("type", "string")
+                                            .field("index", "not_analyzed")
+                                        .endObject()
+                                    .endObject()
+                                .endObject()
+                            .endObject()
+                        .endObject()
+                    .endArray()
+                .endObject().endObject();
+        indexService.mapperService().merge("type1", new CompressedXContent(mappings1.bytes()), true, false);
+        XContentBuilder mappings2 = jsonBuilder().startObject()
+                .startObject("type2")
+                    .startObject("properties")
+                        .startObject("field")
+                            .field("type", "string")
+                        .endObject()
+                    .endObject()
+                .endObject().endObject();
+        indexService.mapperService().merge("type2", new CompressedXContent(mappings2.bytes()), true, false);
+
+        XContentBuilder json = XContentFactory.jsonBuilder().startObject()
+                .field("field", "foo")
+                .endObject();
+        SourceToParse source = SourceToParse.source(json.bytes()).id("1");
+        DocumentMapper mapper = indexService.mapperService().documentMapper("type1");
+        assertNull(mapper.mappers().getMapper("field.raw"));
+        ParsedDocument parsed = mapper.parse(source);
+        assertNotNull(parsed.dynamicMappingsUpdate());
+
+        indexService.mapperService().merge("type1", new CompressedXContent(parsed.dynamicMappingsUpdate().toString()), false, false);
+        mapper = indexService.mapperService().documentMapper("type1");
+        assertNotNull(mapper.mappers().getMapper("field.raw"));
+        parsed = mapper.parse(source);
+        assertNull(parsed.dynamicMappingsUpdate());
+    }
+
     public void testDefaultFloatingPointMappings() throws IOException {
         DocumentMapper mapper = createIndex("test").mapperService().documentMapperWithAutoCreate("type").getDocumentMapper();
         doTestDefaultFloatingPointMappings(mapper, XContentFactory.jsonBuilder());
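The mapper test hunks above repeat one migration many times: DocumentMapperParser.parse now takes the type name explicitly together with the mapping wrapped in CompressedXContent, instead of a bare JSON string. A small sketch of the new call shape; the wrapper class and helper method are invented for illustration, and a MapperService is assumed to be available from the surrounding test:

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.MapperService;

    class MappingParseSketch {
        // Builds a trivial mapping and parses it the new way: explicit type
        // name plus CompressedXContent, as in the call sites rewritten above.
        static DocumentMapper parseTrivialMapping(MapperService mapperService) throws Exception {
            String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                    .startObject("properties")
                        .startObject("field").field("type", "string").endObject()
                    .endObject()
                    .endObject().endObject().string();
            return mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
        }
    }
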
     }
 
     public void testAddExistingField() {
-        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
-        MappedFieldType originalFieldType = f.fieldType();
-        FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo");
+        FakeFieldMapper f = new FakeFieldMapper("foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type1", newList(f));
-        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2));
+        lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
+        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
 
-        assertNotSame(originalFieldType, f.fieldType());
-        assertSame(f.fieldType(), f2.fieldType());
-        assertSame(f.fieldType(), lookup2.get("foo"));
-        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
+        assertSame(f2.fieldType(), lookup2.get("foo"));
         assertEquals(1, size(lookup2.iterator()));
     }
 
     public void testAddExistingIndexName() {
-        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
-        FakeFieldMapper f2 = new FakeFieldMapper("bar", "foo");
-        MappedFieldType originalFieldType = f.fieldType();
+        FakeFieldMapper f = new FakeFieldMapper("foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("bar");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type1", newList(f));
-        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2));
+        lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
+        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
 
-        assertNotSame(originalFieldType, f.fieldType());
-        assertSame(f.fieldType(), f2.fieldType());
         assertSame(f.fieldType(), lookup2.get("foo"));
-        assertSame(f.fieldType(), lookup2.get("bar"));
-        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
+        assertSame(f2.fieldType(), lookup2.get("bar"));
         assertEquals(2, size(lookup2.iterator()));
     }
 
     public void testAddExistingFullName() {
-        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
-        FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar");
-        MappedFieldType originalFieldType = f.fieldType();
+        FakeFieldMapper f = new FakeFieldMapper("foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type1", newList(f));
-        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2));
-
-        assertNotSame(originalFieldType, f.fieldType());
-        assertSame(f.fieldType(), f2.fieldType());
-        assertSame(f.fieldType(), lookup2.get("foo"));
-        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
-        assertSame(f.fieldType(), lookup2.getByIndexName("bar"));
-        assertEquals(1, size(lookup2.iterator()));
-    }
-
-    public void testAddExistingBridgeName() {
-        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
-        FakeFieldMapper f2 = new FakeFieldMapper("bar", "bar");
-        FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type1", newList(f, f2));
-        try {
-            FakeFieldMapper f3 = new FakeFieldMapper("foo", "bar");
-            lookup.copyAndAddAll("type2", newList(f3));
-        } catch (IllegalStateException e) {
-            assertTrue(e.getMessage().contains("insane mappings"));
+        try {
+            lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("mapper [foo] has different [index_name]"));
         }
-
-        try {
-            FakeFieldMapper f3 = new FakeFieldMapper("bar", "foo");
-            lookup.copyAndAddAll("type2", newList(f3));
-        } catch (IllegalStateException e) {
-            assertTrue(e.getMessage().contains("insane mappings"));
-        }
-    }
-
-    public void testCheckCompatibilityNewField() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
-        FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup.checkCompatibility("type", newList(f1), false);
     }
 
     public void testCheckCompatibilityMismatchedTypes() {
-        FieldMapper f1 = new FakeFieldMapper("foo", "bar");
+        FieldMapper f1 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type", newList(f1));
+        lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
 
-        MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo", "foo");
+        MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo");
         FieldMapper f2 = new FakeFieldMapper("foo", ft2);
         try {
-            lookup.checkCompatibility("type2", newList(f2), false);
+            lookup.copyAndAddAll("type2", newList(f2), false);
             fail("expected type mismatch");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
         }
         // fails even if updateAllTypes == true
         try {
-            lookup.checkCompatibility("type2", newList(f2), true);
+            lookup.copyAndAddAll("type2", newList(f2), true);
             fail("expected type mismatch");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
@@ -180,65 +129,55 @@ public class FieldTypeLookupTests extends ESTestCase {
     }
 
     public void testCheckCompatibilityConflict() {
-        FieldMapper f1 = new FakeFieldMapper("foo", "bar");
+        FieldMapper f1 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type", newList(f1));
+        lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
 
-        MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo", "bar");
+        MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo");
         ft2.setBoost(2.0f);
         FieldMapper f2 = new FakeFieldMapper("foo", ft2);
         try {
             // different type
-            lookup.checkCompatibility("type2", newList(f2), false);
+            lookup.copyAndAddAll("type2", newList(f2), false);
             fail("expected conflict");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("to update [boost] across all types"));
         }
-        lookup.checkCompatibility("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
-        lookup.checkCompatibility("type2", newList(f2), true); // boost is updateable, so ok if forcing
+        lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
+        lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing
         // now with a non changeable setting
-        MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo", "bar");
+        MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo");
         ft3.setStored(true);
         FieldMapper f3 = new FakeFieldMapper("foo", ft3);
         try {
-            lookup.checkCompatibility("type2", newList(f3), false);
+            lookup.copyAndAddAll("type2", newList(f3), false);
             fail("expected conflict");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("has different [store] values"));
         }
         // even with updateAllTypes == true, incompatible
         try {
-            lookup.checkCompatibility("type2", newList(f3), true);
+            lookup.copyAndAddAll("type2", newList(f3), true);
             fail("expected conflict");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("has different [store] values"));
         }
     }
 
-    public void testSimpleMatchIndexNames() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
-        FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
-        FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type", newList(f1, f2));
-        Collection names = lookup.simpleMatchToIndexNames("b*");
-        assertTrue(names.contains("baz"));
-        assertTrue(names.contains("boo"));
-    }
-
     public void testSimpleMatchFullNames() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
-        FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
+        FakeFieldMapper f1 = new FakeFieldMapper("foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("bar");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type", newList(f1, f2));
+        lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean());
         Collection names = lookup.simpleMatchToFullName("b*");
-        assertTrue(names.contains("foo"));
+        assertFalse(names.contains("foo"));
         assertTrue(names.contains("bar"));
     }
 
     public void testIteratorImmutable() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
+        FakeFieldMapper f1 = new FakeFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type", newList(f1));
+        lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
 
         try {
             Iterator itr = lookup.iterator();
@@ -258,20 +197,20 @@ public class FieldTypeLookupTests extends ESTestCase {
     // this sucks how much must be overridden just do get a dummy field mapper...
     static class FakeFieldMapper extends FieldMapper {
         static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
-        public FakeFieldMapper(String fullName, String indexName) {
-            super(fullName, makeFieldType(fullName, indexName), makeFieldType(fullName, indexName), dummySettings, null, null);
+        public FakeFieldMapper(String fullName) {
+            super(fullName, makeFieldType(fullName), makeFieldType(fullName), dummySettings, null, null);
         }
         public FakeFieldMapper(String fullName, MappedFieldType fieldType) {
             super(fullName, fieldType, fieldType, dummySettings, null, null);
         }
-        static MappedFieldType makeFieldType(String fullName, String indexName) {
+        static MappedFieldType makeFieldType(String fullName) {
             FakeFieldType fieldType = new FakeFieldType();
-            fieldType.setNames(new MappedFieldType.Names(indexName, indexName, fullName));
+            fieldType.setName(fullName);
             return fieldType;
         }
-        static MappedFieldType makeOtherFieldType(String fullName, String indexName) {
+        static MappedFieldType makeOtherFieldType(String fullName) {
             OtherFakeFieldType fieldType = new OtherFakeFieldType();
-            fieldType.setNames(new MappedFieldType.Names(indexName, indexName, fullName));
+            fieldType.setName(fullName);
             return fieldType;
         }
         static class FakeFieldType extends MappedFieldType {
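For readers skimming the `FieldTypeLookupTests` hunks above: the separate `checkCompatibility` step is gone, and `copyAndAddAll` now takes the `updateAllTypes` flag and performs the compatibility checks itself. A minimal sketch of the new call shape, assuming it sits inside `FieldTypeLookupTests` so the `FakeFieldMapper`, `newList`, and `fail` helpers defined above are in scope:

```java
// Sketch only; mirrors the test hunks above, not a general Elasticsearch API guide.
public void sketchCopyAndAddAll() {
    FieldTypeLookup lookup = new FieldTypeLookup();
    // copyAndAddAll is copy-on-write: the receiver is untouched and a new lookup is returned.
    lookup = lookup.copyAndAddAll("type1", newList(new FakeFieldMapper("foo")), randomBoolean());

    // Registering an incompatible field type for the same name now fails at add time,
    // where checkCompatibility() used to be a separate call.
    MappedFieldType conflicting = FakeFieldMapper.makeOtherFieldType("foo");
    try {
        lookup.copyAndAddAll("type2", newList(new FakeFieldMapper("foo", conflicting)), false);
        fail("expected a conflict");
    } catch (IllegalArgumentException expected) {
        // non-updateable differences (e.g. [store]) fail even with updateAllTypes == true
    }
}
```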
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
index ca0cbf194d6..c8d7e4ac147 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
@@ -173,7 +173,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
 
     MappedFieldType createNamedDefaultFieldType() {
         MappedFieldType fieldType = createDefaultFieldType();
-        fieldType.setNames(new MappedFieldType.Names("foo"));
+        fieldType.setName("foo");
         return fieldType;
     }
 
@@ -213,7 +213,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
 
     protected String toString(MappedFieldType ft) {
         return "MappedFieldType{" +
-            "names=" + ft.names() +
+            "name=" + ft.name() +
             ", boost=" + ft.boost() +
             ", docValues=" + ft.hasDocValues() +
             ", indexAnalyzer=" + ft.indexAnalyzer() +
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
index f4a7507a0b8..035da8163ee 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
@@ -20,25 +20,20 @@
 package org.elasticsearch.index.mapper;
 
 import org.elasticsearch.ExceptionsHelper;
-import org.elasticsearch.Version;
-import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.junit.Rule;
 import org.junit.rules.ExpectedException;
 
-import static org.elasticsearch.test.VersionUtils.getFirstVersion;
-import static org.elasticsearch.test.VersionUtils.getPreviousVersion;
-import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.Matchers.hasToString;
-
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.concurrent.ExecutionException;
 
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.Matchers.hasToString;
+
 public class MapperServiceTests extends ESSingleNodeTestCase {
     @Rule
     public ExpectedException expectedException = ExpectedException.none();
@@ -58,23 +53,6 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
             .actionGet();
     }
 
-    public void testThatLongTypeNameIsNotRejectedOnPreElasticsearchVersionTwo() {
-        String index = "text-index";
-        String field = "field";
-        String type = new String(new char[256]).replace("\0", "a");
-
-        CreateIndexResponse response =
-                client()
-                        .admin()
-                        .indices()
-                        .prepareCreate(index)
-                        .setSettings(settings(randomVersionBetween(random(), getFirstVersion(), getPreviousVersion(Version.V_2_0_0_beta1))))
-                        .addMapping(type, field, "type=string")
-                        .execute()
-                        .actionGet();
-        assertNotNull(response);
-    }
-
     public void testTypeNameTooLong() {
         String index = "text-index";
         String field = "field";
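The `FieldTypeTestCase` hunks carry the same rename as the lookup tests: the `MappedFieldType.Names` tuple (full name plus a separate Lucene index name) collapses to a single name. A hedged before/after sketch, assuming the `createDefaultFieldType()` helper from `FieldTypeTestCase` above:

```java
// Before: fieldType.setNames(new MappedFieldType.Names("foo")); ... ft.names().indexName()
// After, per the hunks above: one name, one accessor.
MappedFieldType fieldType = createDefaultFieldType();
fieldType.setName("foo");
assertEquals("foo", fieldType.name());
```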
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
index bbba3432b66..841249510d1 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
@@ -26,6 +26,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lucene.all.AllEntries;
 import org.elasticsearch.common.lucene.all.AllField;
@@ -49,18 +50,27 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.hamcrest.Matchers;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.nullValue;
 
 public class SimpleAllMapperTests extends ESSingleNodeTestCase {
 
     public void testSimpleAllMappers() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
         AllField field = (AllField) doc.getField("_all");
@@ -79,7 +89,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     public void testAllMappersNoBoost() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json");
         IndexService index = createIndex("test");
-        DocumentMapper docMapper = index.mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = index.mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
         AllField field = (AllField) doc.getField("_all");
@@ -93,7 +103,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
 
     public void testAllMappersTermQuery() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
         AllField field = (AllField) doc.getField("_all");
@@ -111,7 +121,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     // #6187: make sure we see AllTermQuery even when offsets are indexed in the _all field:
     public void testAllMappersWithOffsetsTermQuery() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
         AllField field = (AllField) doc.getField("_all");
@@ -130,7 +140,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     // #6187: if _all doesn't index positions then we never use AllTokenStream, even if some fields have boost
     public void testBoostWithOmitPositions() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
         AllField field = (AllField) doc.getField("_all");
@@ -141,7 +151,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     // #6187: if no fields were boosted, we shouldn't use AllTokenStream
     public void testNoBoost() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
         AllField field = (AllField) doc.getField("_all");
@@ -152,10 +162,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     public void testSimpleAllMappersWithReparse() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
         String builtMapping = docMapper.mappingSource().string();
         // reparse it
-        DocumentMapper builtDocMapper = parser.parse(builtMapping);
+        DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
 
@@ -170,7 +180,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
 
     public void testSimpleAllMappersWithStore() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
         AllField field = (AllField) doc.getField("_all");
@@ -187,10 +197,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     public void testSimpleAllMappersWithReparseWithStore() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
         String builtMapping = docMapper.mappingSource().string();
         // reparse it
-        DocumentMapper builtDocMapper = parser.parse(builtMapping);
+        DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
         Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
 
@@ -248,7 +258,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
             }
             tv_stored |= tv_positions || tv_payloads || tv_offsets;
             if (randomBoolean()) {
-                mappingBuilder.field("similarity", similarity = randomBoolean() ? "BM25" : "TF/IDF");
+                mappingBuilder.field("similarity", similarity = "BM25");
             }
             mappingBuilder.endObject();
         }
@@ -256,10 +266,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String mapping = mappingBuilder.endObject().endObject().bytes().toUtf8();
         logger.info(mapping);
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = parser.parse("test", new CompressedXContent(mapping));
         String builtMapping = docMapper.mappingSource().string();
         // reparse it
-        DocumentMapper builtDocMapper = parser.parse(builtMapping);
+        DocumentMapper builtDocMapper = parser.parse("test", new CompressedXContent(builtMapping));
 
         byte[] json = jsonBuilder().startObject()
                 .field("foo", "bar")
@@ -286,7 +296,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
         } else {
             assertThat(field, nullValue());
         }
-        if (similarity == null || similarity.equals("TF/IDF")) {
+        if (similarity == null) {
             assertThat(builtDocMapper.allFieldMapper().fieldType().similarity(), nullValue());
         } else {
             assertThat(similarity, equalTo(builtDocMapper.allFieldMapper().fieldType().similarity().name()));
@@ -303,7 +313,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
 
     public void testMultiField_includeInAllSetToFalse() throws IOException {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/multifield-mapping_include_in_all_set_to_false.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
 
         XContentBuilder builder = XContentFactory.jsonBuilder();
         builder.startObject()
@@ -321,7 +331,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
 
     public void testMultiField_defaults() throws IOException {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/multifield-mapping_default.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
 
         XContentBuilder builder = XContentFactory.jsonBuilder();
         builder.startObject()
@@ -341,7 +351,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
 
     public void testMisplacedTypeInRoot() throws IOException {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_type_in_root.json");
         try {
-            createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
+            createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
             fail("Expected MapperParsingException");
         } catch (MapperParsingException e) {
             assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
@@ -353,7 +363,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     public void testMistypedTypeInRoot() throws IOException {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mistyped_type_in_root.json");
         try {
-            createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
+            createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
             fail("Expected MapperParsingException");
         } catch (MapperParsingException e) {
             assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
@@ -365,7 +375,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     public void testMisplacedMappingAsRoot() throws IOException {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_mapping_key_in_root.json");
         try {
-            createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
+            createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
             fail("Expected MapperParsingException");
         } catch (MapperParsingException e) {
             assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
@@ -378,17 +388,17 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
     public void testRootObjectMapperPropertiesDoNotCauseException() throws IOException {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_dynamic_template_mapping.json");
-        parser.parse("test", mapping);
+        parser.parse("test", new CompressedXContent(mapping));
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_dynamic_date_formats_mapping.json");
-        parser.parse("test", mapping);
+        parser.parse("test", new CompressedXContent(mapping));
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_date_detection_mapping.json");
-        parser.parse("test", mapping);
+        parser.parse("test", new CompressedXContent(mapping));
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_numeric_detection_mapping.json");
-        parser.parse("test", mapping);
+        parser.parse("test", new CompressedXContent(mapping));
     }
 
     // issue https://github.com/elasticsearch/elasticsearch/issues/5864
-    public void testMetadataMappersStillWorking() {
+    public void testMetadataMappersStillWorking() throws MapperParsingException, IOException {
         String mapping = "{";
         Map rootTypes = new HashMap<>();
         //just pick some example from DocumentMapperParser.rootTypeParsers
@@ -401,7 +411,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
             mapping += "\"" + key+ "\"" + ":" + rootTypes.get(key) + ",\n";
         }
         mapping += "\"properties\":{}}";
-        createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
+        createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
     }
 
     public void testDocValuesNotAllowed() throws IOException {
@@ -410,7 +420,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
                 .field("doc_values", true)
             .endObject().endObject().endObject().string();
         try {
-            createIndex("test").mapperService().documentMapperParser().parse(mapping);
+            createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
             fail();
         } catch (MapperParsingException e) {
             assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
@@ -424,7 +434,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject().endObject().endObject().string();
         Settings legacySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
         try {
-            createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse(mapping);
+            createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
             fail();
         } catch (MapperParsingException e) {
             assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
@@ -446,22 +456,9 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
         }
     }
 
-    public void testIncludeInObjectBackcompat() throws Exception {
-        String mapping = jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
-        ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject().field("_all", "foo").endObject().bytes());
-
-        assertNull(doc.rootDoc().get("_all"));
-        AllField field = (AllField) doc.rootDoc().getField("_all");
-        // the backcompat behavior is actually ignoring directly specifying _all
-        assertFalse(field.getAllEntries().fields().iterator().hasNext());
-    }
-
     public void testIncludeInObjectNotAllowed() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         try {
             docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
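Every call site in `SimpleAllMapperTests` moves from `parse(String mappingSource)` to `parse(String type, CompressedXContent source)`: the type name is passed explicitly instead of being read out of the mapping JSON, and the source travels as `CompressedXContent` rather than a raw `String`. The pattern, sketched under the assumption of the `ESSingleNodeTestCase` context used by these tests:

```java
// Old: DocumentMapper docMapper = parser.parse(mapping);
// New call shape, as used throughout the hunks above:
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
```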
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
index 0bc56b0c30b..308478ad49d 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
@@ -20,13 +20,11 @@
 package org.elasticsearch.index.mapper.binary;
 
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.compress.CompressorFactory;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
@@ -53,7 +51,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
         assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class));
 
@@ -70,7 +68,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         // case 1: a simple binary value
         final byte[] binaryValue1 = new byte[100];
@@ -83,7 +81,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase {
         }
         final byte[] binaryValue2 = out.bytes().toBytes();
         assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2)));
-        
+
         for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) {
             ParsedDocument doc = mapper.parse("test", "type", "id", XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes());
             BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java
index 05a0a03cc59..b5a54ce92bd 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java
@@ -19,6 +19,10 @@
 
 package org.elasticsearch.index.mapper.boost;
 
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.ParsedDocument;
@@ -27,7 +31,10 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 import static org.hamcrest.Matchers.equalTo;
 
 public class CustomBoostMappingTests extends ESSingleNodeTestCase {
-    public void testCustomBoostValues() throws Exception {
+
+    private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build();
+
+    public void testBackCompatCustomBoostValues() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("s_field").field("type", "string").endObject()
                 .startObject("l_field").field("type", "long").startObject("norms").field("enabled", true).endObject().endObject()
@@ -39,7 +46,7 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
                 .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject()
                 .endObject().endObject().endObject().string();
 
-        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
                 .startObject("s_field").field("value", "s_value").field("boost", 2.0f).endObject()
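Boost values carried inside the document source are now a backwards-compatibility feature, which is why these tests are renamed to `testBackCompat*` and pinned to an index created as 2.0. The pinning idiom, read off the hunk above:

```java
// Indices created with an explicit older creation version keep the legacy parsing
// behavior; without BW_SETTINGS the boost-in-document syntax would be rejected.
private static final Settings BW_SETTINGS = Settings.builder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
        .build();

// ... later, inside a test method (sketch, per the call sites above):
// DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService()
//         .documentMapperParser().parse("type", new CompressedXContent(mapping));
```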
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java
index c9320e2da18..bb5aecd9ec9 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java
@@ -20,7 +20,11 @@
 package org.elasticsearch.index.mapper.boost;
 
 import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -32,7 +36,10 @@ import static org.hamcrest.Matchers.closeTo;
 /**
  */
 public class FieldLevelBoostTests extends ESSingleNodeTestCase {
-    public void testFieldLevelBoost() throws Exception {
+
+    private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build();
+
+    public void testBackCompatFieldLevelBoost() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
                 .startObject("str_field").field("type", "string").endObject()
                 .startObject("int_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject()
@@ -44,7 +51,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase {
                 .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject()
                 .string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         BytesReference json = XContentFactory.jsonBuilder().startObject()
                 .startObject("str_field").field("boost", 2.0).field("value", "some name").endObject()
                 .startObject("int_field").field("boost", 3.0).field("value", 10).endObject()
@@ -82,7 +89,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase {
         assertThat((double) f.boost(), closeTo(9.0, 0.001));
     }
 
-    public void testInvalidFieldLevelBoost() throws Exception {
+    public void testBackCompatInvalidFieldLevelBoost() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
                 .startObject("str_field").field("type", "string").endObject()
                 .startObject("int_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject()
@@ -94,7 +101,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase {
                 .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject()
                 .string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         try {
             docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject()
                     .startObject("str_field").field("foo", "bar")
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java b/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java
index 1cfee0dd66e..ea142d6f441 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/camelcase/CamelCaseFieldNameTests.java
@@ -44,10 +44,11 @@ public class CamelCaseFieldNameTests extends ESSingleNodeTestCase {
 
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
+        documentMapper = index.mapperService().documentMapper("type");
 
         assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
         assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));
 
-        documentMapper = index.mapperService().documentMapperParser().parse(documentMapper.mappingSource().string());
+        documentMapper = index.mapperService().documentMapperParser().parse("type", documentMapper.mappingSource());
 
         assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
         assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));
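The one-line additions in `CamelCaseFieldNameTests` (and earlier in this patch) all follow from mappers becoming immutable: applying a dynamic mapping update no longer mutates the `DocumentMapper` you already hold, so it has to be re-fetched after the update is applied. A sketch of the idiom, assuming the single-node test context used above:

```java
ParsedDocument doc = documentMapper.parse("test", "type", "1", source);
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type")
        .setSource(doc.dynamicMappingsUpdate().toString()).get();
// The old reference is stale; fetch the merged mapper before asserting on it.
documentMapper = index.mapperService().documentMapper("type");
```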
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
index b81a3d6d40d..6de07d8cd0f 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/completion/CompletionFieldMapperTests.java
@@ -20,18 +20,27 @@ package org.elasticsearch.index.mapper.completion;
 
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.suggest.document.*;
+import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
+import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
+import org.apache.lucene.search.suggest.document.PrefixCompletionQuery;
+import org.apache.lucene.search.suggest.document.RegexCompletionQuery;
+import org.apache.lucene.search.suggest.document.SuggestField;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.automaton.Operations;
 import org.apache.lucene.util.automaton.RegExp;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
@@ -39,7 +48,10 @@ import java.io.IOException;
 import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
 
 public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
     public void testDefaultConfiguration() throws IOException {
@@ -49,7 +61,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@@ -82,7 +94,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@@ -117,7 +129,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@@ -142,7 +154,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -150,7 +162,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .field("completion", "suggestion")
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 1);
     }
 
@@ -161,7 +173,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -169,7 +181,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .array("completion", "suggestion1", "suggestion2")
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 2);
     }
 
@@ -180,7 +192,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -191,7 +203,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 1);
     }
 
@@ -202,7 +214,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -213,7 +225,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 3);
     }
 
@@ -224,7 +236,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -245,7 +257,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 3);
     }
 
@@ -256,7 +268,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
         ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@@ -277,7 +289,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endArray()
                 .endObject()
                 .bytes());
-        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
+        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertSuggestFields(fields, 6);
     }
 
@@ -288,7 +300,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         try {
             defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                     .startObject()
@@ -314,7 +326,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
         charsRefBuilder.append("sugg");
         charsRefBuilder.setCharAt(2, '\u001F');
@@ -367,7 +379,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
         Query prefixQuery = completionFieldMapper.fieldType().prefixQuery(new BytesRef("co"));
@@ -381,7 +393,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
         Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
@@ -398,7 +410,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
         Query prefixQuery = completionFieldMapper.fieldType()
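All of the `CompletionFieldMapperTests` hunks make the same substitution when pulling indexed suggest fields out of a parsed document:

```java
// Old: parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
// New: the field type's single name doubles as the Lucene field name.
IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
```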
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java
index 4dc017aa6bd..fa7bbf8f249 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java
@@ -19,6 +19,10 @@
 
 package org.elasticsearch.index.mapper.compound;
 
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.ParsedDocument;
@@ -28,14 +32,17 @@ import static org.hamcrest.Matchers.closeTo;
 import static org.hamcrest.Matchers.equalTo;
 
 public class CompoundTypesTests extends ESSingleNodeTestCase {
-    public void testStringType() throws Exception {
+
+    private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build();
+
+    public void testBackCompatStringType() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties")
                 .startObject("field1").field("type", "string").endObject()
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java
index d94ae2b6735..daf54d501d7 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.copyto;
 
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -31,7 +32,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MergeResult;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.ParsedDocument;
@@ -128,6 +129,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
 
         assertNotNull(parsedDoc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("type1").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
+        docMapper = index.mapperService().documentMapper("type1");
 
         fieldMapper = docMapper.mappers().getMapper("new_field");
         assertThat(fieldMapper, instanceOf(LongFieldMapper.class));
     }
@@ -151,7 +153,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
 
                 .endObject().endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -177,7 +179,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -213,7 +215,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -242,7 +244,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -276,7 +278,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
@@ -309,29 +311,15 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
 
                 .endObject().endObject().endObject().string();
 
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper docMapperBefore = parser.parse(mappingBefore);
+        MapperService mapperService = createIndex("test").mapperService();
+        DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), true, false);
 
-        List fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();
+        assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields());
 
-        assertThat(fields.size(), equalTo(2));
-        assertThat(fields.get(0), equalTo("foo"));
-        assertThat(fields.get(1), equalTo("bar"));
+        DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), false, false);
 
-
-        DocumentMapper docMapperAfter = parser.parse(mappingAfter);
-
-        MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), true, false);
-
-        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
-
-        docMapperBefore.merge(docMapperAfter.mapping(), false, false);
-
-        fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();
-
-        assertThat(fields.size(), equalTo(2));
-        assertThat(fields.get(0), equalTo("baz"));
-        assertThat(fields.get(1), equalTo("bar"));
+        assertEquals(Arrays.asList("baz", "bar"), docMapperAfter.mappers().getMapper("copy_test").copyTo().copyToFields());
+        assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields());
     }
 
     public void testCopyToNestedField() throws Exception {
@@ -376,7 +364,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
         }
 
         mapping = mapping.endObject();
-        DocumentMapper mapper = parser.parse(mapping.string());
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
 
         XContentBuilder jsonDoc = XContentFactory.jsonBuilder()
                 .startObject()
@@ -456,7 +444,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
 
         BytesReference json = jsonBuilder().startObject()
                 .field("copy_test", "foo")
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -86,7 +88,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); FieldMapper mapper = defaultMapper.mappers().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -102,11 +104,34 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - defaultMapper = parser.parse(mapping); + defaultMapper = parser.parse("type", new CompressedXContent(mapping)); mapper = defaultMapper.mappers().getMapper("field"); builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); assertEquals("{\"field\":{\"type\":\"boolean\",\"doc_values\":false,\"null_value\":true}}", builder.string()); } + + public void testMultiFields() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "boolean") + .startObject("fields") + .startObject("as_string") + .field("type", "string") + .field("index", "not_analyzed") + .endObject() + .endObject() + .endObject().endObject() + .endObject().endObject().string(); + DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false); + assertEquals(mapping, mapper.mappingSource().toString()); + BytesReference source = XContentFactory.jsonBuilder() + .startObject() + .field("field", false) + .endObject().bytes(); + ParsedDocument doc = mapper.parse("test", "type", "1", source); + assertNotNull(doc.rootDoc().getField("field.as_string")); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java index abca5595537..facc1eb41ad 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.core; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -113,13 +112,10 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase { .startObject("test") .startObject("properties") .startObject("foo") - .field("type", "multi_field") + .field("type", "string") + .field("store", storeCountedFields) + .field("analyzer", "simple") .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("store", storeCountedFields) - .field("analyzer", "simple") - .endObject() .startObject("token_count") .field("type", "token_count") .field("analyzer", "standard") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java index ba9303e8b58..a746717b73a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java @@ -19,11 +19,16 @@ package org.elasticsearch.index.mapper.core; -import org.apache.lucene.analysis.*; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CannedTokenStream; +import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.analysis.Token; +import org.apache.lucene.analysis.TokenStream; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MergeResult; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -46,8 +51,8 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper stage1 = parser.parse(stage1Mapping); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), true, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject() .startObject("person") @@ -58,17 +63,12 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject().string(); - DocumentMapper stage2 = parser.parse(stage2Mapping); + DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), false, false); - MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); - // Just simulated so merge hasn't happened yet + // previous mapper has not been modified assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); - - mergeResult = stage1.merge(stage2.mapping(), false, false); - assertThat(mergeResult.hasConflicts(), equalTo(false)); - // Just simulated so merge hasn't happened yet - assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard")); + // but the new one has the change + assertThat(((TokenCountFieldMapper) stage2.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard")); } public void testCountPositions() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java deleted file mode 100644 index 8ddfc3a2ae7..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper.date; - -import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.junit.Before; - -import java.util.Arrays; -import java.util.List; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.VersionUtils.randomVersionBetween; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.is; - -/** - * Test class to check for all the conditions defined in - * https://github.com/elastic/elasticsearch/issues/10971 - */ -public class DateBackwardsCompatibilityTests extends ESSingleNodeTestCase { - - private String index = "testindex"; - private String type = "testtype"; - private Version randomVersionBelow2x; - - @Before - public void setup() throws Exception { - randomVersionBelow2x = randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1); - } - - public void testThatPre2xIndicesNumbersAreTreatedAsEpochs() throws Exception { - index = createPre2xIndexAndMapping(); - long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 - XContentBuilder document = jsonBuilder().startObject().field("date_field", dateInMillis).endObject(); - index(document); - - // search for date in time range - QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24"); - SearchResponse response = client().prepareSearch(index).setQuery(query).get(); - assertHitCount(response, 1); - } - - public void testThatPre2xFailedStringParsingLeadsToEpochParsing() throws Exception { - index = createPre2xIndexAndMapping(); - long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 - String date = String.valueOf(dateInMillis); - XContentBuilder document = jsonBuilder().startObject().field("date_field", date).endObject(); - index(document); - - // search for date in time range - QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24"); - SearchResponse response = client().prepareSearch(index).setQuery(query).get(); - assertHitCount(response, 1); - } - - public void testThatPre2xSupportsUnixTimestampsInAnyDateFormat() throws Exception { - long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 - List dateFormats = Arrays.asList("dateOptionalTime", "weekDate", 
"tTime", "ordinalDate", "hourMinuteSecond", "hourMinute"); - - for (String format : dateFormats) { - XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") - .startObject("date_field").field("type", "date").field("format", format).endObject() - .endObject().endObject(); - - index = createIndex(randomVersionBelow2x, mapping); - - XContentBuilder document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", String.valueOf(dateInMillis)) - .endObject(); - index(document); - - // indexing as regular timestamp should work as well - document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", dateInMillis) - .endObject(); - index(document); - - client().admin().indices().prepareDelete(index).get(); - } - } - - public void testThatPre2xIndicesNumbersAreTreatedAsTimestamps() throws Exception { - // looks like a unix time stamp but is meant as 2016-06-23T01:00:00.000 - see the specified date format - long date = 2015062301000l; - - XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") - .startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject() - .endObject().endObject(); - index = createIndex(randomVersionBelow2x, mapping); - - XContentBuilder document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", randomBoolean() ? String.valueOf(date) : date) - .endObject(); - index(document); - - // no results in expected time range - QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime"); - SearchResponse response = client().prepareSearch(index).setQuery(query).get(); - assertNoSearchHits(response); - - // result in unix timestamp range - QueryBuilder timestampQuery = QueryBuilders.rangeQuery("date_field").from(2015062300000L).to(2015062302000L); - assertHitCount(client().prepareSearch(index).setQuery(timestampQuery).get(), 1); - - // result should also work with regular specified dates - QueryBuilder regularTimeQuery = QueryBuilders.rangeQuery("date_field").from("2033-11-08").to("2033-11-09").format("dateOptionalTime"); - assertHitCount(client().prepareSearch(index).setQuery(regularTimeQuery).get(), 1); - } - - public void testThatPost2xIndicesNumbersAreTreatedAsStrings() throws Exception { - // looks like a unix time stamp but is meant as 2016-06-23T01:00:00.000 - see the specified date format - long date = 2015062301000l; - - XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") - .startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject() - .endObject().endObject(); - index = createIndex(Version.CURRENT, mapping); - - XContentBuilder document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", String.valueOf(date)) - .endObject(); - index(document); - - document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", date) - .endObject(); - index(document); - - // search for date in time range - QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime"); - SearchResponse response = client().prepareSearch(index).setQuery(query).get(); - assertHitCount(response, 2); - } - - public void testDynamicDateDetectionIn2xDoesNotSupportEpochs() throws Exception { - try { - XContentBuilder mapping = jsonBuilder().startObject() - .startArray("dynamic_date_formats").value("dateOptionalTime").value("epoch_seconds").endArray() - .endObject(); - 
createIndex(Version.CURRENT, mapping); - fail("Expected a MapperParsingException, but did not happen"); - } catch (MapperParsingException e) { - assertThat(e.getMessage(), containsString("Failed to parse mapping [" + type + "]")); - assertThat(e.getMessage(), containsString("Epoch [epoch_seconds] is not supported as dynamic date format")); - } - } - - private String createPre2xIndexAndMapping() throws Exception { - return createIndexAndMapping(randomVersionBelow2x); - } - - private String createIndexAndMapping(Version version) throws Exception { - XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") - .startObject("date_field").field("type", "date").field("format", "dateOptionalTime").endObject() - .endObject().endObject(); - - return createIndex(version, mapping); - } - - private String createIndex(Version version, XContentBuilder mapping) { - Settings settings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - createIndex(index, settings, type, mapping); - - ensureGreen(index); - return index; - } - - private void index(XContentBuilder document) { - IndexResponse indexResponse = client().prepareIndex(index, type).setSource(document).setRefresh(true).get(); - assertThat(indexResponse.isCreated(), is(true)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index fb67401e334..f9531c3aff3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.LocaleUtils; @@ -39,8 +40,12 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; @@ -53,12 +58,20 @@ import org.joda.time.DateTimeZone; import org.junit.Before; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.index.mapper.string.SimpleStringMappingTests.docValuesType; -import static org.hamcrest.Matchers.*; +import static 
org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class SimpleDateMappingTests extends ESSingleNodeTestCase { @@ -67,7 +80,9 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { .startObject("properties").endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = mapper("test", "type", mapping); + IndexService index = createIndex("test"); + client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); + DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -81,6 +96,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); + defaultMapper = index.mapperService().documentMapper("type"); FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1"); assertThat(fieldMapper, instanceOf(DateFieldMapper.class)); DateFieldMapper dateFieldMapper = (DateFieldMapper)fieldMapper; @@ -324,7 +340,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { // Unless the global ignore_malformed option is set to true Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping); + defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field3", "a") @@ -371,9 +387,8 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { Map config = getConfigurationViaXContent(initialDateFieldMapper); assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy")); - MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), false, false); + defaultMapper = defaultMapper.merge(mergeMapper.mapping(), false); - assertThat("Merging resulting in conflicts: " + Arrays.asList(mergeResult.buildConflicts()), mergeResult.hasConflicts(), is(false)); assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class))); DateFieldMapper mergedFieldMapper = (DateFieldMapper) defaultMapper.mappers().getMapper("field"); @@ -418,51 +433,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { throw new AssertionError("missing"); } - public void testNumericResolutionBackwardsCompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").field("format", "date_time").field("numeric_resolution", "seconds").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test1", "type", mapping, Version.V_0_90_0); - - // provided as an int - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", 42) - .endObject() - .bytes()); - 
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(42000L)); - - // provided as a string - doc = defaultMapper.parse("test", "type", "2", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "43") - .endObject() - .bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(43000L)); - - // but formatted dates still parse as milliseconds - doc = defaultMapper.parse("test", "type", "2", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "1970-01-01T00:00:44.000Z") - .endObject() - .bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(44000L)); - - // expected to fail due to field epoch date formatters not being set - DocumentMapper currentMapper = mapper("test2", "type", mapping); - try { - currentMapper.parse("test", "type", "2", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", randomBoolean() ? "43" : 43) - .endObject() - .bytes()); - fail("expected parse failure"); - } catch (MapperParsingException e) { - assertTrue(e.getMessage(), e.getMessage().contains("failed to parse [date_field]")); - } - } - public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject() @@ -490,31 +460,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { assertThat(indexResponse.isCreated(), is(true)); } - public void testThatOlderIndicesAllowNonStrictDates() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - Version randomVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1); - IndexService index = createIndex("test", settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion).build()); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - assertDateFormat("epoch_millis||date_optional_time"); - DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "1-1-1T00:00:44.000Z") - .endObject() - .bytes()); - - // also test normal date - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2015-06-06T00:00:44.000Z") - .endObject() - .bytes()); - } - public void testThatNewIndicesOnlyAllowStrictDates() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() @@ -544,34 +489,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { } } - public void testThatUpgradingAnOlderIndexToStrictDateWorks() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").field("format", "date_optional_time").endObject().endObject() - .endObject().endObject().string(); - - Version randomVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1); - createIndex("test", 
settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion).build()); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - assertDateFormat("epoch_millis||date_optional_time"); - - // index doc - client().prepareIndex("test", "type", "1").setSource(XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2015-06-06T00:00:44.000Z") - .endObject()).get(); - - // update mapping - String newMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field") - .field("type", "date") - .field("format", "strict_date_optional_time||epoch_millis") - .endObject().endObject().endObject().endObject().string(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(newMapping).get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - assertDateFormat("strict_date_optional_time||epoch_millis"); - } - private void assertDateFormat(String expectedFormat) throws IOException { GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").setTypes("type").get(); Map mappingMap = response.getMappings().get("test").get("type").getSourceAsMap(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java index d07e6177814..da5c53f46f9 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java @@ -44,6 +44,7 @@ public class GenericStoreDynamicTemplateTests extends ESSingleNodeTestCase { byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json"); ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java index 829730e68cd..75dd396d8dd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java @@ -44,6 +44,7 @@ public class PathMatchDynamicTemplateTests extends ESSingleNodeTestCase { byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-data.json"); ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java index 014f0295808..250b7a8d28a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java @@ -55,6 +55,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase { ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", builder.bytes()); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); DocumentFieldMappers mappers = docMapper.mappers(); assertThat(mappers.smartNameFieldMapper("s"), Matchers.notNullValue()); @@ -74,6 +75,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase { byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json"); ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); @@ -130,6 +132,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase { byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json"); ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get(); + docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index e5d08db8d9f..c4b04000eb6 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.externalvalues; import com.spatial4j.core.shape.Point; - import org.apache.lucene.document.Field; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; @@ -29,15 +28,14 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.builders.ShapeBuilders; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.BinaryFieldMapper; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; import 
org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy; @@ -82,7 +80,7 @@ public class ExternalMapper extends FieldMapper { private String mapperName; public Builder(String name, String generatedValue, String mapperName) { - super(name, new ExternalFieldType()); + super(name, new ExternalFieldType(), new ExternalFieldType()); this.builder = this; this.stringBuilder = stringField(name).store(false); this.generatedValue = generatedValue; @@ -96,9 +94,6 @@ public class ExternalMapper extends FieldMapper { @Override public ExternalMapper build(BuilderContext context) { - ContentPath.Type origPathType = context.path().pathType(); - context.path().pathType(ContentPath.Type.FULL); - context.path().add(name); BinaryFieldMapper binMapper = binBuilder.build(context); BooleanFieldMapper boolMapper = boolBuilder.build(context); @@ -108,7 +103,6 @@ public class ExternalMapper extends FieldMapper { FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context); context.path().remove(); - context.path().pathType(origPathType); setupFieldType(context); return new ExternalMapper(name, fieldType, generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper, @@ -167,11 +161,11 @@ public class ExternalMapper extends FieldMapper { private final String generatedValue; private final String mapperName; - private final BinaryFieldMapper binMapper; - private final BooleanFieldMapper boolMapper; - private final BaseGeoPointFieldMapper pointMapper; - private final GeoShapeFieldMapper shapeMapper; - private final FieldMapper stringMapper; + private BinaryFieldMapper binMapper; + private BooleanFieldMapper boolMapper; + private BaseGeoPointFieldMapper pointMapper; + private GeoShapeFieldMapper shapeMapper; + private FieldMapper stringMapper; public ExternalMapper(String simpleName, MappedFieldType fieldType, String generatedValue, String mapperName, @@ -219,10 +213,40 @@ public class ExternalMapper extends FieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // ignore this for now } + @Override + public FieldMapper updateFieldType(Map fullNameToFieldType) { + ExternalMapper update = (ExternalMapper) super.updateFieldType(fullNameToFieldType); + MultiFields multiFieldsUpdate = multiFields.updateFieldType(fullNameToFieldType); + BinaryFieldMapper binMapperUpdate = (BinaryFieldMapper) binMapper.updateFieldType(fullNameToFieldType); + BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType); + GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType); + GeoShapeFieldMapper shapeMapperUpdate = (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType); + StringFieldMapper stringMapperUpdate = (StringFieldMapper) stringMapper.updateFieldType(fullNameToFieldType); + if (update == this + && multiFieldsUpdate == multiFields + && binMapperUpdate == binMapper + && boolMapperUpdate == boolMapper + && pointMapperUpdate == pointMapper + && shapeMapperUpdate == shapeMapper + && stringMapperUpdate == stringMapper) { + return this; + } + if (update == this) { + update = (ExternalMapper) clone(); + } + update.multiFields = multiFieldsUpdate; + update.binMapper = binMapperUpdate; + update.boolMapper = boolMapperUpdate; + update.pointMapper = 
pointMapperUpdate; + update.shapeMapper = shapeMapperUpdate; + update.stringMapper = stringMapperUpdate; + return update; + } + @Override public Iterator iterator() { return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java index dae8bc67fda..9223b640024 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java @@ -24,11 +24,9 @@ import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.StringField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; @@ -47,7 +45,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { private static MappedFieldType FIELD_TYPE = new BooleanFieldMapper.BooleanFieldType(); static { - FIELD_TYPE.setNames(new MappedFieldType.Names(FIELD_NAME)); + FIELD_TYPE.setName(FIELD_NAME); FIELD_TYPE.freeze(); } @@ -55,23 +53,11 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { super(FIELD_NAME, FIELD_TYPE, FIELD_TYPE, indexSettings); } - @Override - public String name() { - return CONTENT_TYPE; - } - @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { // handled in post parse } - @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { - if (!(mergeWith instanceof ExternalMetadataMapper)) { - mergeResult.addConflict("Trying to merge " + mergeWith + " with " + this); - } - } - @Override public Iterator iterator() { return Collections.emptyIterator(); @@ -99,14 +85,14 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { public static class Builder extends MetadataFieldMapper.Builder { protected Builder() { - super(CONTENT_TYPE, FIELD_TYPE); + super(FIELD_NAME, FIELD_TYPE, FIELD_TYPE); } @Override public ExternalMetadataMapper build(BuilderContext context) { return new ExternalMetadataMapper(context.indexSettings()); } - + } public static class TypeParser implements MetadataFieldMapper.TypeParser { @@ -120,7 +106,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) { return new ExternalMetadataMapper(indexSettings); } - + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java index 4cf7b405217..7e519c3b722 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java @@ -87,7 
+87,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { .startObject("f") .field("type", ExternalMapperPlugin.EXTERNAL_UPPER) .startObject("fields") - .startObject("f") + .startObject("g") .field("type", "string") .field("store", "yes") .startObject("fields") @@ -107,7 +107,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { refresh(); SearchResponse response = client().prepareSearch("test-idx") - .setQuery(QueryBuilders.termQuery("f.f.raw", "FOO BAR")) + .setQuery(QueryBuilders.termQuery("f.g.raw", "FOO BAR")) .execute().actionGet(); assertThat(response.getHits().totalHits(), equalTo((long) 1)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java index 24449015a21..bf3196fdcf7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.externalvalues; import org.apache.lucene.util.GeoUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -55,7 +56,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), indexService.analysisService(), indexService.similarityService(), mapperRegistry); - DocumentMapper documentMapper = parser.parse( + DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( XContentFactory.jsonBuilder().startObject().startObject("type") .startObject(ExternalMetadataMapper.CONTENT_TYPE) .endObject() @@ -63,7 +64,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { .startObject("field").field("type", "external").endObject() .endObject() .endObject().endObject().string() - ); + )); ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -102,7 +103,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), indexService.analysisService(), indexService.similarityService(), mapperRegistry); - DocumentMapper documentMapper = parser.parse( + DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("field") .field("type", ExternalMapperPlugin.EXTERNAL) @@ -121,7 +122,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject().endObject() - .string()); + .string())); ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -161,7 +162,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), indexService.analysisService(), indexService.similarityService(), mapperRegistry); - 
DocumentMapper documentMapper = parser.parse( + DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("field") .field("type", ExternalMapperPlugin.EXTERNAL) @@ -183,7 +184,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject().endObject() - .string()); + .string())); ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 93fd71599c4..758e5a38294 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -33,14 +34,12 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.geo.RandomGeoGenerator; -import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Map; @@ -49,7 +48,6 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -61,7 +59,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -71,7 +69,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { boolean indexCreatedBefore22 = version.before(Version.V_2_2_0); assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - final boolean stored = indexCreatedBefore22 == false; + final boolean stored = false; assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(stored)); assertThat(doc.rootDoc().getField("point.lon"), 
notNullValue()); assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(stored)); @@ -91,7 +89,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -111,7 +109,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -131,7 +129,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -151,7 +149,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -175,7 +173,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject(); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -224,7 +222,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { 
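// --- editorial aside, not part of the commit ---------------------------------
// Every DocumentMapperParser.parse(...) call in GeoPointFieldMapperTests is
// updated the same way: the type name is now passed explicitly and the mapping
// JSON is wrapped in a CompressedXContent. A minimal sketch of the new call
// pattern, mirroring the hunks above and below:

String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("point").field("type", "geo_point").endObject()
        .endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService()
        .documentMapperParser().parse("type", new CompressedXContent(mapping));
// -----------------------------------------------------------------------------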
mapping.field("ignore_malformed", false).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -287,7 +285,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -327,7 +325,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -354,7 +352,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -390,7 +388,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -414,7 +412,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - 
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -440,7 +438,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -476,7 +474,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -501,7 +499,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -525,7 +523,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -551,7 +549,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test", 
settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -594,7 +592,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) .field("validate", true).endObject().endObject() .endObject().endObject().string(); - parser.parse(validateMapping); + parser.parse("type", new CompressedXContent(validateMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]"); @@ -605,7 +603,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) .field("validate_lat", true).endObject().endObject() .endObject().endObject().string(); - parser.parse(validateMapping); + parser.parse("type", new CompressedXContent(validateMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]"); @@ -616,7 +614,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) .field("validate_lon", true).endObject().endObject() .endObject().endObject().string(); - parser.parse(validateMapping); + parser.parse("type", new CompressedXContent(validateMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]"); @@ -628,7 +626,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) .field("normalize", true).endObject().endObject() .endObject().endObject().string(); - parser.parse(normalizeMapping); + parser.parse("type", new CompressedXContent(normalizeMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]"); @@ -639,7 +637,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) .field("normalize_lat", true).endObject().endObject() .endObject().endObject().string(); - parser.parse(normalizeMapping); + parser.parse("type", new CompressedXContent(normalizeMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]"); @@ -650,67 +648,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { 
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) .field("normalize_lon", true).endObject().endObject() .endObject().endObject().string(); - parser.parse(normalizeMapping); + parser.parse("type", new CompressedXContent(normalizeMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]"); } } - /** - * Test backward compatibility - */ - public void testBackwardCompatibleOptions() throws Exception { - // backward compatibility testing - Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, - Version.V_1_7_1)).build(); - - // validate - DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate", false).endObject().endObject() - .endObject().endObject().string(); - parser.parse(mapping); - assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true")); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate_lat", false).endObject().endObject() - .endObject().endObject().string(); - parser.parse(mapping); - assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true")); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate_lon", false).endObject().endObject() - .endObject().endObject().string(); - parser.parse(mapping); - assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true")); - - // normalize - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize", true).endObject().endObject() - .endObject().endObject().string(); - parser.parse(mapping); - assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true")); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize_lat", true).endObject().endObject() - .endObject().endObject().string(); - parser.parse(mapping); - assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true")); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize_lon", true).endObject().endObject() - .endObject().endObject().string(); - parser.parse(mapping); - assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true")); - } - public void 
testGeoPointMapperMerge() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -791,4 +735,32 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { assertEquals("dr5regy6rc6y".substring(0, numHashes-i), hashes.get(i)); } } + + public void testMultiField() throws Exception { + int numDocs = randomIntBetween(10, 100); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") + .field("type", "geo_point").startObject("fields") + .startObject("geohash").field("type", "geo_point").field("geohash_precision", 12).field("geohash_prefix", true).endObject() + .startObject("latlon").field("type", "geo_point").field("lat_lon", true).endObject().endObject() + .endObject().endObject().endObject().endObject().string(); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test") + .addMapping("pin", mapping); + mappingRequest.execute().actionGet(); + + // create index and add random test points + client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); + for (int i=0; i parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - return new MetadataFieldMapper.Builder("_dummy", FIELD_TYPE) { + return new MetadataFieldMapper.Builder("_dummy", FIELD_TYPE, FIELD_TYPE) { @Override public DummyMetadataFieldMapper build(BuilderContext context) { return new DummyMetadataFieldMapper(context.indexSettings()); @@ -245,7 +203,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { static { FIELD_TYPE.setTokenized(false); FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setNames(new MappedFieldType.Names("_dummy")); + FIELD_TYPE.setName("_dummy"); FIELD_TYPE.freeze(); } @@ -282,7 +240,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService, indexService.analysisService(), indexService.similarityService(), mapperRegistry); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDocument = mapper.parse("index", "type", "id", new BytesArray("{}")); IndexableField[] fields = parsedDocument.rootDoc().getFields(FieldNamesFieldMapper.NAME); boolean found = false; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java index 879c6590751..0d52b66dfb6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java @@ -42,12 +42,12 @@ public class ParentFieldMapperTests extends ESTestCase { ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0))); - assertThat(parentFieldMapper.getParentJoinFieldType().names().indexName(), equalTo("_parent#child")); + assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child")); 
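
For reference, the pattern every hunk above migrates to is the same: the type name is passed explicitly alongside a CompressedXContent instead of being read out of the JSON. A minimal sketch against the 2.x-era test helpers shown above (assumes the createIndex helper from ESSingleNodeTestCase):

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.DocumentMapperParser;

    // Build a mapping source, then hand the parser both the type name and
    // the compressed mapping; the old single-argument parse(String) is gone.
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
            .endObject().endObject().string();
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
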
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
index 879c6590751..0d52b66dfb6 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
@@ -42,12 +42,12 @@ public class ParentFieldMapperTests extends ESTestCase {

         ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0)));

-        assertThat(parentFieldMapper.getParentJoinFieldType().names().indexName(), equalTo("_parent#child"));
+        assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child"));
         assertThat(parentFieldMapper.getParentJoinFieldType().fieldDataType(), nullValue());
         assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));

-        assertThat(parentFieldMapper.getChildJoinFieldType().names().indexName(), equalTo("_parent#parent"));
+        assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
         assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.LAZY));
         assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
@@ -60,12 +60,12 @@ public class ParentFieldMapperTests extends ESTestCase {

         ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0)));

-        assertThat(parentFieldMapper.getParentJoinFieldType().names().indexName(), equalTo("_parent#child"));
+        assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child"));
         assertThat(parentFieldMapper.getParentJoinFieldType().fieldDataType(), nullValue());
         assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));

-        assertThat(parentFieldMapper.getChildJoinFieldType().names().indexName(), equalTo("_parent#parent"));
+        assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
         assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER));
         assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
@@ -78,12 +78,12 @@ public class ParentFieldMapperTests extends ESTestCase {

         ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0)));

-        assertThat(parentFieldMapper.getParentJoinFieldType().names().indexName(), equalTo("_parent#child"));
+        assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child"));
         assertThat(parentFieldMapper.getParentJoinFieldType().fieldDataType(), nullValue());
         assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));

-        assertThat(parentFieldMapper.getChildJoinFieldType().names().indexName(), equalTo("_parent#parent"));
+        assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
         assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER_GLOBAL_ORDINALS));
         assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
         assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java
index 105b3b446ce..309fa274919 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.internal;

 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -31,7 +32,7 @@ public class TypeFieldMapperTests extends ESSingleNodeTestCase {

     public void testDocValues() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();

-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         TypeFieldMapper typeMapper = docMapper.metadataMapper(TypeFieldMapper.class);
         assertTrue(typeMapper.fieldType().hasDocValues());
     }
@@ -41,7 +42,7 @@ public class TypeFieldMapperTests extends ESSingleNodeTestCase {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
         Settings bwcSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0_beta1.id).build();

-        DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         TypeFieldMapper typeMapper = docMapper.metadataMapper(TypeFieldMapper.class);
         assertFalse(typeMapper.fieldType().hasDocValues());
     }
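
Both files above chase the same core rename: MappedFieldType used to carry a Names object (so tests read names().indexName()) and now exposes a single flat name(). A minimal sketch of the new shape, using a test-local field type as a hypothetical stand-in:

    // Hypothetical stand-in field type; setNames(new MappedFieldType.Names(...)) is gone,
    // a single setName(...) replaces it, and the type is frozen before use.
    MappedFieldType fieldType = new StringFieldMapper.StringFieldType();
    fieldType.setName("_parent#child");
    fieldType.freeze();
    assertEquals("_parent#child", fieldType.name());
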
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java
index 4245641fd82..82a8918c66b 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java
@@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.ip;

+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -42,7 +43,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("ip").field("type", "ip").endObject().endObject()
                 .endObject().endObject().string();

-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -82,7 +83,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase {
                 .field("ignore_malformed", false).endObject().startObject("field3").field("type", "ip").endObject().endObject().endObject()
                 .endObject().string();

-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field1", "").field("field2", "10.20.30.40").endObject().bytes());
@@ -104,7 +105,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase {
         // Unless the global ignore_malformed option is set to true
         Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build();
-        defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping);
+        defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes());
         assertThat(doc.rootDoc().getField("field3"), nullValue());
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java
index 656599c5036..d171430dfff 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java
@@ -59,6 +59,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
                 .bytes());
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
+        mapper = index.mapperService().documentMapper("type");

         writer.addDocument(doc.rootDoc());
         writer.addDocument(doc.rootDoc());
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
index d67b97c3d85..89e6630ffa2 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
@@ -29,6 +29,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Numbers;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
@@ -60,7 +61,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject()
                 .string();
-        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

         ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java
index 1a66879c448..80f7942bbcc 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java
@@ -29,7 +29,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.Mapping;
-import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
@@ -39,6 +38,7 @@ import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;

+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
@@ -51,41 +51,35 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
                 .startObject("name").field("type", "string").endObject()
                 .endObject().endObject().endObject().string();
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper stage1 = parser.parse(stage1Mapping);
+        DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
         String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
                 .startObject("name").field("type", "string").endObject()
                 .startObject("age").field("type", "integer").endObject()
                 .startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject().endObject()
                 .endObject().endObject().endObject().string();
-        DocumentMapper stage2 = parser.parse(stage2Mapping);
+        DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));

-        MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
-        assertThat(mergeResult.hasConflicts(), equalTo(false));
-        // since we are simulating, we should not have the age mapping
+        DocumentMapper merged = stage1.merge(stage2.mapping(), false);
+        // stage1 mapping should not have been modified
         assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
         assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
-        // now merge, don't simulate
-        mergeResult = stage1.merge(stage2.mapping(), false, false);
-        // there is still merge failures
-        assertThat(mergeResult.hasConflicts(), equalTo(false));
-        // but we have the age in
-        assertThat(stage1.mappers().smartNameFieldMapper("age"), notNullValue());
-        assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());
+        // but merged should
+        assertThat(merged.mappers().smartNameFieldMapper("age"), notNullValue());
+        assertThat(merged.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());
     }

     public void testMergeObjectDynamic() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject().string();
-        DocumentMapper mapper = parser.parse(objectMapping);
+        DocumentMapper mapper = parser.parse("type1", new CompressedXContent(objectMapping));
         assertNull(mapper.root().dynamic());

         String withDynamicMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject().string();
-        DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping);
+        DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping));
         assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));

-        MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), false, false);
-        assertThat(mergeResult.hasConflicts(), equalTo(false));
-        assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
+        DocumentMapper merged = mapper.merge(withDynamicMapper.mapping(), false);
+        assertThat(merged.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
     }

     public void testMergeObjectAndNested() throws Exception {
@@ -93,20 +87,25 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
         String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                 .startObject("obj").field("type", "object").endObject()
                 .endObject().endObject().endObject().string();
-        DocumentMapper objectMapper = parser.parse(objectMapping);
+        DocumentMapper objectMapper = parser.parse("type1", new CompressedXContent(objectMapping));
         String nestedMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                 .startObject("obj").field("type", "nested").endObject()
                 .endObject().endObject().endObject().string();
-        DocumentMapper nestedMapper = parser.parse(nestedMapping);
+        DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping));

-        MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), true, false);
-        assertThat(mergeResult.hasConflicts(), equalTo(true));
-        assertThat(mergeResult.buildConflicts().length, equalTo(1));
-        assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from non-nested to nested"));
+        try {
+            objectMapper.merge(nestedMapper.mapping(), false);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from non-nested to nested"));
+        }

-        mergeResult = nestedMapper.merge(objectMapper.mapping(), true, false);
-        assertThat(mergeResult.buildConflicts().length, equalTo(1));
-        assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested"));
+        try {
+            nestedMapper.merge(objectMapper.mapping(), false);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from nested to non-nested"));
+        }
     }

     public void testMergeSearchAnalyzer() throws Exception {
@@ -118,18 +117,17 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject()
                 .endObject().endObject().string();

-        DocumentMapper existing = parser.parse(mapping1);
-        DocumentMapper changed = parser.parse(mapping2);
+        DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1));
+        DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2));

         assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
-        MergeResult mergeResult = existing.merge(changed.mapping(), false, false);
+        DocumentMapper merged = existing.merge(changed.mapping(), false);

-        assertThat(mergeResult.hasConflicts(), equalTo(false));
-        assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword"));
+        assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword"));
     }

     public void testChangeSearchAnalyzerToDefault() throws Exception {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+        MapperService mapperService = createIndex("test").mapperService();
         String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
                 .endObject().endObject().string();
@@ -137,15 +135,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject()
                 .endObject().endObject().string();

-        DocumentMapper existing = parser.parse(mapping1);
-        DocumentMapper changed = parser.parse(mapping2);
+        DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), true, false);
+        DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), false, false);

         assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
-        MergeResult mergeResult = existing.merge(changed.mapping(), false, false);
-        assertThat(mergeResult.hasConflicts(), equalTo(false));
-        assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
-        assertThat(((StringFieldMapper) (existing.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14));
+        assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
+        assertThat(((StringFieldMapper) (merged.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14));
     }

     public void testConcurrentMergeTest() throws Throwable {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java
index 58fa8fd69b0..b9d157fbb4c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java
@@ -26,14 +26,23 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParseContext.Document;
-import org.elasticsearch.index.mapper.core.*;
+import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
+import org.elasticsearch.index.mapper.core.DateFieldMapper;
+import org.elasticsearch.index.mapper.core.LongFieldMapper;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
 import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.VersionUtils;
@@ -45,19 +54,19 @@
 import java.util.Map;
 import java.util.TreeMap;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.mapper.MapperBuilders.*;
+import static org.elasticsearch.index.mapper.MapperBuilders.doc;
+import static org.elasticsearch.index.mapper.MapperBuilders.rootObject;
+import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
 import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;

 /**
  *
  */
 public class MultiFieldTests extends ESSingleNodeTestCase {
-    public void testMultiFieldMultiFieldType() throws Exception {
-        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json");
-        testMultiField(mapping);
-    }

     public void testMultiFieldMultiFields() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-fields.json");
@@ -65,7 +74,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
     }

     private void testMultiField(String mapping) throws Exception {
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
         Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
@@ -137,19 +146,17 @@ public class MultiFieldTests extends ESSingleNodeTestCase {

     public void testBuildThenParse() throws Exception {
         IndexService indexService = createIndex("test");
-        Settings settings = indexService.getIndexSettings().getSettings();
-        DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-        DocumentMapper builderDocMapper = doc(settings, rootObject("person").add(
+        DocumentMapper builderDocMapper = doc(rootObject("person").add(
                 stringField("name").store(true)
                         .addMultiField(stringField("indexed").index(true).tokenized(true))
                         .addMultiField(stringField("not_indexed").index(false).store(true))
-        ), indexService.mapperService()).build(indexService.mapperService(), mapperParser);
+        ), indexService.mapperService()).build(indexService.mapperService());

         String builtMapping = builderDocMapper.mappingSource().string();
//        System.out.println(builtMapping);
         // reparse it
-        DocumentMapper docMapper = mapperParser.parse(builtMapping);
+        DocumentMapper docMapper = indexService.mapperService().documentMapperParser().parse("person", new CompressedXContent(builtMapping));

         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
@@ -175,261 +182,6 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
         assertEquals(IndexOptions.NONE, f.fieldType().indexOptions());
     }

-    public void testConvertMultiFieldNoDefaultField() throws Exception {
-        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
-        BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
-        Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
-
-        assertNull(doc.getField("name"));
-        IndexableField f = doc.getField("name.indexed");
-        assertThat(f.name(), equalTo("name.indexed"));
-        assertThat(f.stringValue(), equalTo("some name"));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        f = doc.getField("name.not_indexed");
-        assertThat(f.name(), equalTo("name.not_indexed"));
-        assertThat(f.stringValue(), equalTo("some name"));
-        assertThat(f.fieldType().stored(), equalTo(true));
-        assertEquals(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        assertThat(docMapper.mappers().getMapper("name"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("name"), instanceOf(StringFieldMapper.class));
-        assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("name").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("name").fieldType().tokenized(), equalTo(true));
-
-        assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("name.indexed"), instanceOf(StringFieldMapper.class));
-        assertNotNull(docMapper.mappers().getMapper("name.indexed").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().tokenized(), equalTo(true));
-
-        assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("name.not_indexed"), instanceOf(StringFieldMapper.class));
-        assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("name.not_indexed").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().stored(), equalTo(true));
-        assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().tokenized(), equalTo(true));
-
-        assertNull(doc.getField("age"));
-        f = doc.getField("age.not_stored");
-        assertThat(f.name(), equalTo("age.not_stored"));
-        assertThat(f.numericValue(), equalTo((Number) 28L));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        f = doc.getField("age.stored");
-        assertThat(f.name(), equalTo("age.stored"));
-        assertThat(f.numericValue(), equalTo((Number) 28L));
-        assertThat(f.fieldType().stored(), equalTo(true));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        assertThat(docMapper.mappers().getMapper("age"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("age"), instanceOf(LongFieldMapper.class));
-        assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("age").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("age").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("age").fieldType().tokenized(), equalTo(false));
-
-        assertThat(docMapper.mappers().getMapper("age.not_stored"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("age.not_stored"), instanceOf(LongFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("age.not_stored").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("age.not_stored").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("age.not_stored").fieldType().tokenized(), equalTo(false));
-
-        assertThat(docMapper.mappers().getMapper("age.stored"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("age.stored"), instanceOf(LongFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("age.stored").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("age.stored").fieldType().stored(), equalTo(true));
-        assertThat(docMapper.mappers().getMapper("age.stored").fieldType().tokenized(), equalTo(false));
-    }
-
-    public void testConvertMultiFieldGeoPoint() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
-        Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        boolean indexCreatedBefore22 = version.before(Version.V_2_2_0);
-        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-geo_point.json");
-        DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
-
-        assertThat(docMapper.mappers().getMapper("a"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("a").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("a").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("a").fieldType().tokenized(), equalTo(false));
-
-        assertThat(docMapper.mappers().getMapper("a.b"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("a.b"), instanceOf(BaseGeoPointFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("a.b").fieldType().indexOptions());
-        final boolean stored = indexCreatedBefore22 == false;
-        assertThat(docMapper.mappers().getMapper("a.b").fieldType().stored(), equalTo(stored));
-        assertThat(docMapper.mappers().getMapper("a.b").fieldType().tokenized(), equalTo(false));
-        final boolean hasDocValues = indexCreatedBefore22 == false;
-        assertThat(docMapper.mappers().getMapper("a.b").fieldType().hasDocValues(), equalTo(hasDocValues));
-
-        BytesReference json = jsonBuilder().startObject()
-                .field("a", "-1,-1")
-                .endObject().bytes();
-        Document doc = docMapper.parse("test", "type", "1", json).rootDoc();
-
-        IndexableField f = doc.getField("a");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("a"));
-        assertThat(f.stringValue(), equalTo("-1,-1"));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        f = doc.getField("a.b");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("a.b"));
-        if (indexCreatedBefore22 == true) {
-            assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
-        } else {
-            assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0)));
-        }
-        assertThat(f.fieldType().stored(), equalTo(stored));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        assertThat(docMapper.mappers().getMapper("b"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("b"), instanceOf(BaseGeoPointFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("b").fieldType().stored(), equalTo(stored));
-        assertThat(docMapper.mappers().getMapper("b").fieldType().tokenized(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("b").fieldType().hasDocValues(), equalTo(hasDocValues));
-
-        assertThat(docMapper.mappers().getMapper("b.a"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("b.a"), instanceOf(StringFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b.a").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("b.a").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("b.a").fieldType().tokenized(), equalTo(false));
-
-        json = jsonBuilder().startObject()
-                .field("b", "-1,-1")
-                .endObject().bytes();
-        doc = docMapper.parse("test", "type", "1", json).rootDoc();
-
-        f = doc.getField("b");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("b"));
-        if (indexCreatedBefore22 == true) {
-            assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
-        } else {
-            assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0)));
-        }
-        assertThat(f.fieldType().stored(), equalTo(stored));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        f = doc.getField("b.a");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("b.a"));
-        assertThat(f.stringValue(), equalTo("-1,-1"));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        json = jsonBuilder().startObject()
-                .startArray("b").startArray().value(-1).value(-1).endArray().startArray().value(-2).value(-2).endArray().endArray()
-                .endObject().bytes();
-        doc = docMapper.parse("test", "type", "1", json).rootDoc();
-
-        f = doc.getFields("b")[0];
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("b"));
-        if (indexCreatedBefore22 == true) {
-            assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
-        } else {
-            assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0)));
-        }
-        assertThat(f.fieldType().stored(), equalTo(stored));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        f = doc.getFields("b")[1];
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("b"));
-        if (indexCreatedBefore22 == true) {
-            assertThat(f.stringValue(), equalTo("-2.0,-2.0"));
-        } else {
-            assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-2.0, -2.0)));
-        }
-        assertThat(f.fieldType().stored(), equalTo(stored));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        f = doc.getField("b.a");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("b.a"));
-        // NOTE: "]" B/c the lat,long aren't specified as a string, we miss the actual values when parsing the multi
-        // fields. We already skipped over the coordinates values and can't get to the coordinates.
-        // This happens if coordinates are specified as array and object.
-        assertThat(f.stringValue(), equalTo("]"));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-    }
-
-    public void testConvertMultiFieldCompletion() throws Exception {
-        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-completion.json");
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
-
-        assertThat(docMapper.mappers().getMapper("a"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("a").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("a").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("a").fieldType().tokenized(), equalTo(false));
-
-        assertThat(docMapper.mappers().getMapper("a.b"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("a.b"), instanceOf(CompletionFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("a.b").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("a.b").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("a.b").fieldType().tokenized(), equalTo(true));
-
-        BytesReference json = jsonBuilder().startObject()
-                .field("a", "complete me")
-                .endObject().bytes();
-        Document doc = docMapper.parse("test", "type", "1", json).rootDoc();
-
-        IndexableField f = doc.getField("a");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("a"));
-        assertThat(f.stringValue(), equalTo("complete me"));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        f = doc.getField("a.b");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("a.b"));
-        assertThat(f.stringValue(), equalTo("complete me"));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        assertThat(docMapper.mappers().getMapper("b"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("b"), instanceOf(CompletionFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("b").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("b").fieldType().tokenized(), equalTo(true));
-
-        assertThat(docMapper.mappers().getMapper("b.a"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("b.a"), instanceOf(StringFieldMapper.class));
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b.a").fieldType().indexOptions());
-        assertThat(docMapper.mappers().getMapper("b.a").fieldType().stored(), equalTo(false));
-        assertThat(docMapper.mappers().getMapper("b.a").fieldType().tokenized(), equalTo(false));
-
-        json = jsonBuilder().startObject()
-                .field("b", "complete me")
-                .endObject().bytes();
-        doc = docMapper.parse("test", "type", "1", json).rootDoc();
-
-        f = doc.getField("b");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("b"));
-        assertThat(f.stringValue(), equalTo("complete me"));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-
-        f = doc.getField("b.a");
-        assertThat(f, notNullValue());
-        assertThat(f.name(), equalTo("b.a"));
-        assertThat(f.stringValue(), equalTo("complete me"));
-        assertThat(f.fieldType().stored(), equalTo(false));
-        assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
-    }
-
     // The underlying order of the fields in multi fields in the mapping source should always be consistent, if not this
     // can lead to unnecessary re-syncing of the mappings between the local instance and cluster state
     public void testMultiFieldsInConsistentOrder() throws Exception {
@@ -445,7 +197,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
         }
         builder = builder.endObject().endObject().endObject().endObject().endObject();
         String mapping = builder.string();
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         Arrays.sort(multiFieldNames);

         Map<String, Object> sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressedReference(), true).v2();
@@ -486,8 +238,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {

         // Check the mapping remains identical when deserialized/re-serialized
         final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper docMapper = parser.parse(builder.string());
-        DocumentMapper docMapper2 = parser.parse(docMapper.mappingSource().string());
+        DocumentMapper docMapper = parser.parse("type", new CompressedXContent(builder.string()));
+        DocumentMapper docMapper2 = parser.parse("type", docMapper.mappingSource());

         assertThat(docMapper.mappingSource(), equalTo(docMapper2.mappingSource()));
     }
@@ -497,7 +249,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
                 .endObject().endObject().endObject().endObject().string();
         final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         try {
-            parser.parse(mapping);
+            parser.parse("type", new CompressedXContent(mapping));
             fail("expected mapping parse failure");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage().contains("cannot be used in multi field"));
@@ -510,7 +262,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
                 .endObject().endObject().endObject().endObject().string();
         final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         try {
-            parser.parse(mapping);
+            parser.parse("type", new CompressedXContent(mapping));
             fail("expected mapping parse failure");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage().contains("cannot be used in multi field"));
@@ -536,7 +288,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {

         MapperService mapperService = createIndex("test").mapperService();
         try {
-            mapperService.documentMapperParser().parse(mapping.string());
+            mapperService.documentMapperParser().parse("my_type", new CompressedXContent(mapping.string()));
             fail("this should throw an exception because one field contains a dot");
         } catch (MapperParsingException e) {
             assertThat(e.getMessage(), equalTo("Field name [raw.foo] which is a multi field of [city] cannot contain '.'"));
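
A recurring assertion in the surviving MultiFieldTests hunks is that a mapping survives a serialize/reparse round trip byte-for-byte; under the new API that check reads roughly like this (a sketch reusing the parser obtained from createIndex as above):

    DocumentMapper docMapper = parser.parse("type", new CompressedXContent(builder.string()));
    // Reparse the serialized form; mappingSource() is already a CompressedXContent,
    // so no intermediate .string() hop is needed the second time around.
    DocumentMapper docMapper2 = parser.parse("type", docMapper.mappingSource());
    assertEquals(docMapper.mappingSource(), docMapper2.mappingSource());
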
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java
index 0c26324ac6c..e4892583cf8 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java
@@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.multifield;

 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
+import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -121,12 +122,13 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
         assertThat(bField.get("type").toString(), equalTo("string"));
         assertThat(bField.get("index").toString(), equalTo("not_analyzed"));

-        client().prepareIndex("my-index", "my-type", "1").setSource("a", "51,19").setRefresh(true).get();
+        GeoPoint point = new GeoPoint(51, 19);
+        client().prepareIndex("my-index", "my-type", "1").setSource("a", point.toString()).setRefresh(true).get();
         SearchResponse countResponse = client().prepareSearch("my-index").setSize(0)
                 .setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS)))
                 .get();
         assertThat(countResponse.getHits().totalHits(), equalTo(1l));
-        countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "51,19")).get();
+        countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.toString())).get();
         assertThat(countResponse.getHits().totalHits(), equalTo(1l));
     }
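
The MultiFieldsIntegrationIT change swaps the raw "51,19" literal for GeoPoint.toString() so the indexed value and the queried value can never drift apart. The pattern in isolation (a sketch; the exact textual rendering of GeoPoint is an implementation detail, which is precisely why the same rendered value is used on both sides):

    GeoPoint point = new GeoPoint(51, 19);
    client().prepareIndex("my-index", "my-type", "1")
            .setSource("a", point.toString())   // index whatever form GeoPoint renders
            .setRefresh(true).get();
    // Query the multi field with exactly the same rendered value.
    SearchResponse resp = client().prepareSearch("my-index").setSize(0)
            .setQuery(matchQuery("a.b", point.toString())).get();
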
public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); - DocumentMapper docMapper2 = parser.parse(mapping); - - MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); - - docMapper.merge(docMapper2.mapping(), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -82,12 +72,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); - DocumentMapper docMapper3 = parser.parse(mapping); - - mergeResult = docMapper.merge(docMapper3.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); - - docMapper.merge(docMapper3.mapping(), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -98,12 +83,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); - DocumentMapper docMapper4 = parser.parse(mapping); - - mergeResult = docMapper.merge(docMapper4.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); - - docMapper.merge(docMapper4.mapping(), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -132,7 +112,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json"); - mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -149,7 +129,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json"); - mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java index be27e9f83fb..6debfa05ee9 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java @@ -19,6 +19,7 @@ package 
org.elasticsearch.index.mapper.nested; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -36,7 +37,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .startObject("nested1").field("type", "nested").endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -62,7 +63,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .startObject("nested1").field("type", "nested").endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -111,7 +112,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -162,7 +163,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -213,7 +214,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -264,7 +265,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); @@ -315,7 +316,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase { .endObject().endObject() 
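
The NestedMappingTests hunks show the signature change that recurs through the rest of this diff: the one-argument parse(String mapping) becomes parse(String type, CompressedXContent mapping), so the caller names the type explicitly and the mapping source travels in compressed form. Reduced to the bare pattern:

    // Old: ...documentMapperParser().parse(mapping), with the type read from the JSON.
    // New: the type is an explicit argument and the source is wrapped.
    DocumentMapper docMapper = createIndex("test").mapperService()
            .documentMapperParser()
            .parse("type", new CompressedXContent(mapping));
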
.endObject().endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.hasNestedObjects(), equalTo(true)); ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java b/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java index fedb2d83d5d..be3617aaa5a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java @@ -1,5 +1,7 @@ package org.elasticsearch.index.mapper.null_value; +import org.elasticsearch.common.compress.CompressedXContent; + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -49,7 +51,7 @@ public class NullValueTests extends ESSingleNodeTestCase { .endObject().string(); try { - indexService.mapperService().documentMapperParser().parse(mapping); + indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); fail("Test should have failed because [null_value] was null."); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("Property [null_value] cannot be null.")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index d93ae9b6787..624978bf7d0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -73,6 +74,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); + defaultMapper = index.mapperService().documentMapper("type"); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); assertThat(mapper, instanceOf(LongFieldMapper.class)); @@ -97,6 +99,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { assertNotNull(doc.dynamicMappingsUpdate()); assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get()); + defaultMapper = index.mapperService().documentMapper("type"); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); assertThat(mapper, instanceOf(StringFieldMapper.class)); @@ -113,7 +116,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = 
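
The two SimpleNumericTests hunks above add a fresh lookup of the DocumentMapper after the dynamic-mapping update is applied. The reason, sketched with the calls from the diff: applying the update registers a new mapper with the MapperService rather than mutating the old one, so assertions against the stale reference would miss the dynamically added fields.

    // Apply the dynamic mappings produced while parsing the document...
    client().admin().indices().preparePutMapping("test").setType("type")
            .setSource(doc.dynamicMappingsUpdate().toString()).get();
    // ...then re-fetch the current mapper before asserting on it.
    defaultMapper = index.mapperService().documentMapper("type");
    FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
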
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -147,7 +150,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { // Unless the global ignore_malformed option is set to true Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping); + defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field3", "a") @@ -184,7 +187,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); //Test numbers passed as strings String invalidJsonNumberAsString="1"; @@ -284,7 +287,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -320,7 +323,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -352,7 +355,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .field("date_detection", true) .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -402,7 +405,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -472,7 +475,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() 
.startObject() @@ -531,7 +534,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().endObject().string(); try { - parser.parse(mappingWithTV); + parser.parse("type", new CompressedXContent(mappingWithTV)); fail(); } catch (MapperParsingException e) { assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [term_vector : yes]")); @@ -541,7 +544,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) .build(); parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); - parser.parse(mappingWithTV); // no exception + parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception } public void testAnalyzerBackCompat() throws Exception { @@ -560,7 +563,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().endObject().string(); try { - parser.parse(mappingWithTV); + parser.parse("type", new CompressedXContent(mappingWithTV)); fail(); } catch (MapperParsingException e) { assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [analyzer : keyword]")); @@ -570,6 +573,6 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) .build(); parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); - parser.parse(mappingWithTV); // no exception + parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/object/NullValueObjectMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/object/NullValueObjectMappingTests.java index b13fcc8ed91..0a03601ea62 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/object/NullValueObjectMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/object/NullValueObjectMappingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.object; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -37,7 +38,7 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("obj1").field("type", "object").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java index 917ee9806ed..885e038de60 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.object; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.compress.CompressedXContent; import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -34,7 +35,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { defaultMapper.parse("test", "type", "1", new BytesArray(" {\n" + " \"object\": {\n" + @@ -59,7 +60,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("properties").endArray() .endObject().endObject().string(); - createIndex("test").mapperService().documentMapperParser().parse(mapping); + createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); } public void testEmptyFieldsArrayMultiFields() throws Exception { @@ -77,7 +78,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { .endObject() .endObject() .string(); - createIndex("test").mapperService().documentMapperParser().parse(mapping); + createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } public void testFieldsArrayMultiFieldsShouldThrowException() throws Exception { @@ -98,7 +99,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { .endObject() .string(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping); + createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); } catch(MapperParsingException e) { assertThat(e.getMessage(), containsString("expected map for property [fields]")); @@ -117,7 +118,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { .endObject() .endObject() .string(); - createIndex("test").mapperService().documentMapperParser().parse(mapping); + createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } public void testFieldsWithFilledArrayShouldThrowException() throws Exception { @@ -134,7 +135,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { .endObject() .string(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping); + createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); } catch (MapperParsingException e) { assertThat(e.getMessage(), containsString("Expected map for property [fields]")); @@ -160,6 +161,6 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { .endObject() .endObject() .string(); - createIndex("test").mapperService().documentMapperParser().parse(mapping); + createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java index 3719500669c..f6bbde47e9d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java +++ 
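
Note in the SimpleObjectMappingTests hunks that the explicit type argument tracks the root object of each mapping ("tweet" here, "type" elsewhere). With the removed one-argument parse the name came out of the JSON itself; keeping the two in agreement is now the caller's job:

    // The first argument mirrors the root key of the mapping JSON.
    createIndex("test").mapperService().documentMapperParser()
            .parse("tweet", new CompressedXContent(mapping));
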
b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java @@ -18,9 +18,7 @@ */ package org.elasticsearch.index.mapper.parent; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -29,14 +27,12 @@ import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.test.ESSingleNodeTestCase; -import static org.hamcrest.Matchers.nullValue; - public class ParentMappingTests extends ESSingleNodeTestCase { public void testParentSetInDocNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder() @@ -47,29 +43,11 @@ public class ParentMappingTests extends ESSingleNodeTestCase { } } - public void testParentSetInDocBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_parent").field("type", "p_type").endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); - - ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder() - .startObject() - .field("_parent", "1122") - .field("x_field", "x_value") - .endObject() - .bytes()).type("type").id("1")); - - assertEquals("1122", doc.parent()); - assertEquals(Uid.createUid("p_type", "1122"), doc.rootDoc().get("_parent")); - } - public void testParentSet() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_parent").field("type", "p_type").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java index 2582562c039..715eefca9e6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.path; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -34,7 +35,7 @@ import static org.hamcrest.Matchers.nullValue; public class PathMapperTests extends ESSingleNodeTestCase { public void testPathMapping() throws IOException { 
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/path/test-mapping.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); // test full name assertThat(docMapper.mappers().getMapper("first1"), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java index 7d0afdb0724..a658948f022 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/routing/RoutingTypeMapperTests.java @@ -19,34 +19,22 @@ package org.elasticsearch.index.mapper.routing; -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.Map; - -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; public class RoutingTypeMapperTests extends ESSingleNodeTestCase { public void testRoutingMapper() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder() .startObject() @@ -58,75 +46,9 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().get("field"), equalTo("value")); } - public void testFieldTypeSettingsBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_routing") - .field("store", "no") - .field("index", "no") - .endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); - assertThat(docMapper.routingFieldMapper().fieldType().stored(), equalTo(false)); - assertEquals(IndexOptions.NONE, docMapper.routingFieldMapper().fieldType().indexOptions()); - } - - public void testFieldTypeSettingsSerializationBackcompat() throws Exception { - String enabledMapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_routing").field("store", "no").field("index", "no").endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper enabledMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(enabledMapping); - - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - enabledMapper.routingFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); - builder.close(); - Map serializedMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - serializedMap = parser.map(); - } - assertThat(serializedMap, hasKey("_routing")); - assertThat(serializedMap.get("_routing"), instanceOf(Map.class)); - Map routingConfiguration = (Map) serializedMap.get("_routing"); - assertThat(routingConfiguration, hasKey("store")); - assertThat(routingConfiguration.get("store").toString(), is("false")); - assertThat(routingConfiguration, hasKey("index")); - assertThat(routingConfiguration.get("index").toString(), is("no")); - } - - public void testPathBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_routing").field("path", "custom_routing").endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_routing", "routing_value").endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); - - assertEquals(request.routing(), "routing_value"); - } - - public void testIncludeInObjectBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); - - // _routing in a document never worked, so backcompat is ignoring the field - assertNull(request.routing()); - assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_routing")); - } - public void testIncludeInObjectNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java index 0e8c74aee89..ed9792fb44e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper.simple; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; @@ -47,74 +47,70 @@ import static org.hamcrest.Matchers.equalTo; public class SimpleMapperTests extends ESSingleNodeTestCase { public void testSimpleMapper() throws Exception { IndexService indexService = createIndex("test"); - Settings settings = indexService.getIndexSettings().getSettings(); - DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); - DocumentMapper docMapper = doc(settings, + DocumentMapper docMapper = doc( rootObject("person") .add(object("name").add(stringField("first").store(true).index(false))), - indexService.mapperService()).build(indexService.mapperService(), mapperParser); + indexService.mapperService()).build(indexService.mapperService()); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); doc = docMapper.parse("test", "person", "1", json).rootDoc(); } public void testParseToJsonAndParse() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json"); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping)); String builtMapping = docMapper.mappingSource().string(); // reparse it - DocumentMapper builtDocMapper = parser.parse(builtMapping); + DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = builtDocMapper.parse("test", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); } public void testSimpleParser() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = 
createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); assertThat((String) docMapper.meta().get("param1"), equalTo("value1")); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); } public void testSimpleParserNoTypeNoId() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json")); Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); } public void testAttributes() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json"); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping)); assertThat((String) docMapper.meta().get("param1"), equalTo("value1")); String builtMapping = docMapper.mappingSource().string(); - DocumentMapper builtDocMapper = parser.parse(builtMapping); + DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping)); assertThat((String) builtDocMapper.meta().get("param1"), equalTo("value1")); } public void testNoDocumentSent() throws Exception { IndexService indexService = createIndex("test"); - Settings settings = indexService.getIndexSettings().getSettings(); - DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); - DocumentMapper docMapper = doc(settings, + DocumentMapper docMapper = doc( rootObject("person") .add(object("name").add(stringField("first").store(true).index(false))), - indexService.mapperService()).build(indexService.mapperService(), mapperParser); + indexService.mapperService()).build(indexService.mapperService()); BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8)); try { @@ -132,7 +128,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase { .startObject("foo.bar").field("type", "string").endObject() .endObject().endObject().string(); try { - mapperParser.parse(mapping); + mapperParser.parse("type", new CompressedXContent(mapping)); 
fail("Mapping parse should have failed"); } catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("cannot contain '.'")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index 364e9f2063f..35b127b6283 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -24,21 +24,22 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.VersionUtils; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class DefaultSourceMappingTests extends ESSingleNodeTestCase { @@ -49,14 +50,14 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper documentMapper = parser.parse(mapping); + DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() .field("field", "value") .endObject().bytes()); assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON)); - documentMapper = parser.parse(mapping); + documentMapper = parser.parse("type", new CompressedXContent(mapping)); doc = documentMapper.parse("test", "type", "1", XContentFactory.smileBuilder().startObject() .field("field", "value") .endObject().bytes()); @@ -73,7 +74,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .build(); DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser(); - parser.parse(mapping); // no exception + parser.parse("type", new CompressedXContent(mapping)); // no exception } public void testIncludes() throws Exception { @@ -81,7 +82,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .startObject("_source").field("includes", new String[]{"path1*"}).endObject() .endObject().endObject().string(); - DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = documentMapper.parse("test", "type", "1", 
XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() @@ -102,7 +103,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .startObject("_source").field("excludes", new String[]{"path1*"}).endObject() .endObject().endObject().string(); - DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() @@ -136,7 +137,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { // all is well } try { - mapper = parser.parse(null, "{}", defaultMapping); + mapper = parser.parse(null, new CompressedXContent("{}"), defaultMapping); assertThat(mapper.type(), equalTo("my_type")); assertThat(mapper.sourceMapper().enabled(), equalTo(false)); fail(); @@ -155,7 +156,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .startObject("_source").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("my_type", mapping, defaultMapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("my_type", new CompressedXContent(mapping), defaultMapping); assertThat(mapper.type(), equalTo("my_type")); assertThat(mapper.sourceMapper().enabled(), equalTo(true)); } @@ -192,15 +193,20 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { } void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... 
conflicts) throws IOException { - DocumentMapper docMapper = parser.parse(mapping1); - docMapper = parser.parse(docMapper.mappingSource().string()); - MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false); - - List expectedConflicts = new ArrayList<>(Arrays.asList(conflicts)); - for (String conflict : mergeResult.buildConflicts()) { - assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict)); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1)); + docMapper = parser.parse("type", docMapper.mappingSource()); + if (conflicts.length == 0) { + docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false); + } else { + try { + docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false); + fail(); + } catch (IllegalArgumentException e) { + for (String conflict : conflicts) { + assertThat(e.getMessage(), containsString(conflict)); + } + } } - assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty()); } public void testEnabledNotUpdateable() throws Exception { @@ -258,27 +264,27 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { public void testComplete() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - assertTrue(parser.parse(mapping).sourceMapper().isComplete()); + assertTrue(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete()); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").field("enabled", false).endObject() .endObject().endObject().string(); - assertFalse(parser.parse(mapping).sourceMapper().isComplete()); + assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete()); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", "foo.*").endObject() .endObject().endObject().string(); - assertFalse(parser.parse(mapping).sourceMapper().isComplete()); + assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete()); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("excludes", "foo.*").endObject() .endObject().endObject().string(); - assertFalse(parser.parse(mapping).sourceMapper().isComplete()); + assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete()); } public void testSourceObjectContainsExtraTokens() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { documentMapper.parse("test", "type", "1", new BytesArray("{}}")); // extra end object (invalid JSON) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 9ac039a49fb..7bd4d9a78c3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ 
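
The rewritten assertConflicts helper above spells out the new conflict contract: instead of collecting strings from a MergeResult, an illegal merge throws IllegalArgumentException and the expected fragments are matched against its message (the three-argument parse with a default mapping changes only in that its source argument becomes a CompressedXContent). The core of the new helper:

    try {
        docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false);
        fail("merge should have raised a mapping conflict");
    } catch (IllegalArgumentException e) {
        for (String conflict : conflicts) {
            assertThat(e.getMessage(), containsString(conflict));
        }
    }
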
b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -40,7 +40,7 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.StringFieldMapper; @@ -77,7 +77,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("ignore_above", 5).endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -123,7 +123,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { private void assertParseIdemPotent(IndexableFieldType expected, DocumentMapper mapper) throws Exception { String mapping = mapper.toXContent(XContentFactory.jsonBuilder().startObject(), new ToXContent.MapParams(emptyMap())).endObject().string(); - mapper = parser.parse(mapping); + mapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "2345") @@ -137,7 +137,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -155,7 +155,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -178,7 +178,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").startObject("norms").field("enabled", true).endObject().field("index_options", "freqs").endObject().endObject() .endObject().endObject().string(); - defaultMapper = parser.parse(mapping); + defaultMapper = parser.parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -201,7 +201,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", false).endObject().endObject() .endObject().endObject().string(); - defaultMapper = parser.parse(mapping); + defaultMapper = parser.parse("type", 
new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -243,7 +243,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); for (String fieldName : Arrays.asList("field1", "field2", "field3", "field4")) { Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper); assertFalse(fieldName, serializedMap.containsKey("search_quote_analyzer")); @@ -267,7 +267,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - mapper = parser.parse(mapping); + mapper = parser.parse("type", new CompressedXContent(mapping)); for (String fieldName : Arrays.asList("field1", "field2")) { Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper); assertEquals(serializedMap.get("search_quote_analyzer"), "simple"); @@ -319,7 +319,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -390,7 +390,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -444,7 +444,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -479,7 +479,8 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false); + MapperService mapperService = indexService.mapperService(); + DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -493,8 +494,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject() .endObject().endObject().endObject().endObject().string(); - MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false); - assertFalse(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts()); + defaultMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); doc = defaultMapper.parse("test", "type", "1",
XContentFactory.jsonBuilder() .startObject() @@ -509,7 +509,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject() .endObject().endObject().endObject().endObject().string(); try { - defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false); + mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("different [omit_norms]")); @@ -533,31 +533,11 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .field("analyzer", "standard") .endObject().endObject().endObject().endObject().string(); try { - parser.parse(mapping); + parser.parse("type", new CompressedXContent(mapping)); fail("Mapping definition should fail with the position_offset_gap setting"); }catch (MapperParsingException e) { assertEquals(e.getMessage(), "Mapping definition for [field2] has unsupported parameters: [position_offset_gap : 50]"); } } - /** - * Test backward compatibility - */ - public void testBackwardCompatible() throws Exception { - - Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, - Version.V_1_7_1)).build(); - - DocumentMapperParser parser = createIndex("backward_compatible_index", settings).mapperService().documentMapperParser(); - - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("position_offset_gap", 10) - .endObject().endObject().endObject().endObject().string(); - parser.parse(mapping); - - assertThat(parser.parse(mapping).mapping().toString(), containsString("\"position_increment_gap\":10")); - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 53a3bf7bb6e..51ef9ff0024 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -32,17 +32,13 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; @@ -50,34 +46,25 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; import 
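
The norms hunks in SimpleStringMappingTests make the merge direction explicit: norms may be disabled on a live string field, but a later attempt to re-enable them is rejected, and with MergeResult gone the rejection arrives as an exception. A sketch (disableNorms and enableNorms are hypothetical names standing in for the updatedMapping strings of the diff):

    // Disabling norms merges cleanly and yields the updated mapper...
    defaultMapper = mapperService.merge("type", new CompressedXContent(disableNorms), false, false);
    // ...but turning norms back on must fail.
    try {
        mapperService.merge("type", new CompressedXContent(enableNorms), false, false);
        fail();
    } catch (IllegalArgumentException e) {
        assertThat(e.getMessage(), containsString("different [omit_norms]"));
    }
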
java.util.Arrays; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.Version.V_1_5_0; -import static org.elasticsearch.Version.V_2_0_0_beta1; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; /** */ public class TimestampMappingTests extends ESSingleNodeTestCase { - Settings BWC_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); public void testSimpleDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() .field("field", "value") @@ -92,7 +79,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", "yes").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() .field("field", "value") @@ -106,105 +93,37 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { } public void testDefaultValues() throws Exception { - for (Version version : Arrays.asList(V_1_5_0, V_2_0_0_beta1, randomVersion(random()))) { - for (String mapping : Arrays.asList( - XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(), - XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) { - DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse(mapping); - assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled)); - assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); - assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); - assertThat(docMapper.timestampFieldMapper().path(), equalTo(TimestampFieldMapper.Defaults.PATH)); - assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); - String expectedFormat = version.onOrAfter(Version.V_2_0_0_beta1) ? 
TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT : - TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER_BEFORE_2_0.format(); - assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(expectedFormat)); - assertAcked(client().admin().indices().prepareDelete("test").execute().get()); - } + Version version; + do { + version = randomVersion(random()); + } while (version.before(Version.V_2_0_0_beta1)); + for (String mapping : Arrays.asList( + XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(), + XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) { + DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled)); + assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); + assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); + assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); + assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT)); + assertAcked(client().admin().indices().prepareDelete("test").execute().get()); } } - public void testBackcompatSetValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes").field("store", "no").field("index", "no") - .field("path", "timestamp").field("format", "year") - .field("doc_values", true) - .endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping); - assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(true)); - assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(false)); - assertEquals(IndexOptions.NONE, docMapper.timestampFieldMapper().fieldType().indexOptions()); - assertThat(docMapper.timestampFieldMapper().path(), equalTo("timestamp")); - assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo("year")); - assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(true)); - } - public void testThatDisablingDuringMergeIsWorking() throws Exception { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper enabledMapper = parser.parse(enabledMapping); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = parser.parse(disabledMapping); + DocumentMapper disabledMapper = 
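
// A sketch, not part of the diff, of the merge migration shown in this hunk:
// DocumentMapper.merge(...) followed by an in-place mutation check is replaced by
// MapperService.merge(type, source, applyDefault, updateAllTypes), which returns a
// new DocumentMapper instead of mutating the existing one. enabledMapping and
// disabledMapping stand for the _timestamp mappings built in the surrounding test.
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper enabled = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false);
DocumentMapper disabled = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false);
// the first mapper is left untouched; only the returned mapper reflects the update
assertThat(enabled.timestampFieldMapper().enabled(), is(true));
assertThat(disabled.timestampFieldMapper().enabled(), is(false));
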
mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); - enabledMapper.merge(disabledMapper.mapping(), false, false); - - assertThat(enabledMapper.timestampFieldMapper().enabled(), is(false)); - } - - // issue 3174 - public void testThatSerializationWorksCorrectlyForIndexField() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("store", "yes").field("index", "no").endObject() - .endObject().endObject().string(); - DocumentMapper enabledMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(enabledMapping); - - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - enabledMapper.timestampFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); - builder.close(); - Map serializedMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - serializedMap = parser.map(); - } - assertThat(serializedMap, hasKey("_timestamp")); - assertThat(serializedMap.get("_timestamp"), instanceOf(Map.class)); - Map timestampConfiguration = (Map) serializedMap.get("_timestamp"); - assertThat(timestampConfiguration, hasKey("index")); - assertThat(timestampConfiguration.get("index").toString(), is("no")); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testBackcompatPathMissingDefaultValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("ignore_missing", false) - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - try { - request.process(metaData, mappingMetaData, true, "test"); - fail(); - } catch (TimestampParsingException e) { - assertThat(e.getDetailedMessage(), containsString("timestamp is required by mapping")); - } + assertThat(enabledMapper.timestampFieldMapper().enabled(), is(true)); + assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false)); } // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] @@ -220,7 +139,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -233,32 +152,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { assertThat(delay, lessThanOrEqualTo(60000L)); } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testBackcompatPathMissingDefaultToEpochValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - 
.startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("default", "1970-01-01") - .field("format", "YYYY-MM-dd") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(metaData, mappingMetaData, true, "test"); - assertThat(request.timestamp(), notNullValue()); - assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT))); - } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] public void testTimestampMissingDefaultToEpochValue() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -273,7 +166,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .field("foo", "bar") .endObject(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -284,35 +177,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT))); } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testBackcompatPathMissingNowDefaultValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("default", "now") - .field("format", "YYYY-MM-dd") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(metaData, mappingMetaData, true, "test"); - assertThat(request.timestamp(), notNullValue()); - - // We should have less than one minute (probably some ms) - long delay = System.currentTimeMillis() - Long.parseLong(request.timestamp()); - assertThat(delay, lessThanOrEqualTo(60000L)); - } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] public void testTimestampMissingNowDefaultValue() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -328,7 +192,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = 
MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -351,41 +215,13 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null"); } catch (TimestampParsingException e) { assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null")); } } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testBackcompatPathMissingShouldFail() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("ignore_missing", false) - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - try { - request.process(metaData, mappingMetaData, true, "test"); - fail("we should reject the mapping with a TimestampParsingException: timestamp is required by mapping"); - } catch (TimestampParsingException e) { - assertThat(e.getDetailedMessage(), containsString("timestamp is required by mapping")); - } - } - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] public void testTimestampMissingWithForcedNullDefaultShouldFail() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -396,7 +232,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject().endObject(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null"); } catch (TimestampParsingException e) { assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null")); @@ -414,7 +250,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject().endObject(); try { - createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set with ignore_missing set to false"); } catch (TimestampParsingException e) { assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing 
set to false")); @@ -434,7 +270,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject(); MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); @@ -451,10 +287,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { public void testDefaultTimestampStream() throws IOException { // Testing null value for default timestamp { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, + MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null); MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); + new MappingMetaData.Routing(false), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); expected.writeTo(out); @@ -468,10 +304,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { // Testing "now" value for default timestamp { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, + MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null); MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); + new MappingMetaData.Routing(false), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); expected.writeTo(out); @@ -485,10 +321,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { // Testing "ignore_missing" value for default timestamp { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, + MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false); MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); + new MappingMetaData.Routing(false), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); expected.writeTo(out); @@ -501,26 +337,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { } } - public void testMergingFielddataLoadingWorks() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "yes").endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser(); - - DocumentMapper docMapper = parser.parse(mapping); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); - 
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject() - .endObject().endObject().string(); - - MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false, false); - assertThat(mergeResult.buildConflicts().length, equalTo(0)); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER)); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array")); - } - public void testParsingNotDefaultTwiceDoesNotChangeMapping() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp") @@ -529,120 +345,11 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); - docMapper = parser.parse(docMapper.mappingSource().string()); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + docMapper = parser.parse("type", docMapper.mappingSource()); assertThat(docMapper.mappingSource().string(), equalTo(mapping)); } - public void testBackcompatParsingTwiceDoesNotChangeTokenizeValue() throws Exception { - String[] index_options = {"no", "analyzed", "not_analyzed"}; - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true) - .field("index", index_options[randomInt(2)]) - .field("store", true) - .field("path", "foo") - .field("default", "1970-01-01") - .startObject("fielddata").field("format", "doc_values").endObject() - .endObject() - .startObject("properties") - .endObject() - .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser(); - - DocumentMapper docMapper = parser.parse(mapping); - boolean tokenized = docMapper.timestampFieldMapper().fieldType().tokenized(); - docMapper = parser.parse(docMapper.mappingSource().string()); - assertThat(tokenized, equalTo(docMapper.timestampFieldMapper().fieldType().tokenized())); - } - - public void testMergingConflicts() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true) - .field("store", "yes") - .field("index", "analyzed") - .field("path", "foo") - .field("default", "1970-01-01") - .endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - MapperService mapperService = createIndex("test", indexSettings).mapperService(); - - DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", false) - .startObject("fielddata").field("format", "array").endObject() - 
.field("store", "no") - .field("index", "no") - .field("path", "bar") - .field("default", "1970-01-02") - .endObject() - .endObject().endObject().string(); - - try { - mapperService.merge("type", new CompressedXContent(mapping), false, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values")); - assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values")); - } - - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); - assertTrue(docMapper.timestampFieldMapper().enabled()); - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true) - .field("store", "yes") - .field("index", "analyzed") - .field("path", "bar") - .field("default", "1970-01-02") - .endObject() - .endObject().endObject().string(); - try { - mapperService.merge("type", new CompressedXContent(mapping), false, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02")); - assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value. Value is foo path in merged mapping is bar")); - } - } - - public void testBackcompatMergingConflictsForIndexValues() throws Exception { - List indexValues = new ArrayList<>(); - indexValues.add("analyzed"); - indexValues.add("no"); - indexValues.add("not_analyzed"); - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("index", indexValues.remove(randomInt(2))) - .endObject() - .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser(); - - DocumentMapper docMapper = parser.parse(mapping); - mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("index", indexValues.remove(randomInt(1))) - .endObject() - .endObject().endObject().string(); - - MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false); - List expectedConflicts = new ArrayList<>(); - expectedConflicts.add("mapper [_timestamp] has different [index] values"); - expectedConflicts.add("mapper [_timestamp] has different [tokenize] values"); - if (indexValues.get(0).equals("not_analyzed") == false) { - // if the only index value left is not_analyzed, then the doc values setting will be the same, but in the - // other two cases, it will change - expectedConflicts.add("mapper [_timestamp] has different [doc_values] values"); - } - - for (String conflict : mergeResult.buildConflicts()) { - assertThat(conflict, isIn(expectedConflicts)); - } - } - /** * Test for issue #9223 */ @@ -658,133 +365,22 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { new MappingMetaData(new CompressedXContent(mapping)); } - public void testBackcompatMergePaths() throws Exception { - String[] possiblePathValues = {"some_path", "anotherPath", null}; - DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser(); - XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp"); - String path1 = possiblePathValues[randomInt(2)]; - if (path1!=null) { - mapping1.field("path", path1); + void 
assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException { + mapperService.merge("type", new CompressedXContent(mapping1), true, false); + try { + mapperService.merge("type", new CompressedXContent(mapping2), false, false); + assertNull(conflict); + } catch (IllegalArgumentException e) { + assertNotNull(conflict); + assertThat(e.getMessage(), containsString(conflict)); } - mapping1.endObject() - .endObject().endObject(); - XContentBuilder mapping2 = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp"); - String path2 = possiblePathValues[randomInt(2)]; - if (path2!=null) { - mapping2.field("path", path2); - } - mapping2.endObject() - .endObject().endObject(); - - assertConflict(mapping1.string(), mapping2.string(), parser, (path1 == path2 ? null : "Cannot update path in _timestamp value")); - } - - void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException { - DocumentMapper docMapper = parser.parse(mapping1); - docMapper = parser.parse(docMapper.mappingSource().string()); - MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false); - assertThat(mergeResult.buildConflicts().length, equalTo(conflict == null ? 0 : 1)); - if (conflict != null) { - assertThat(mergeResult.buildConflicts()[0], containsString(conflict)); - } - } - - public void testBackcompatDocValuesSerialization() throws Exception { - // default - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // just format specified - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .startObject("fielddata").field("format", "doc_values").endObject() - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // explicitly enabled - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("doc_values", true) - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // explicitly disabled - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("doc_values", false) - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // explicitly enabled, with format - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("doc_values", true) - .startObject("fielddata").field("format", "doc_values").endObject() - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - - // explicitly disabled, with format - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("doc_values", false) - .startObject("fielddata").field("format", "doc_values").endObject() - .endObject().endObject().endObject().string(); - assertDocValuesSerialization(mapping); - } - - void assertDocValuesSerialization(String mapping) throws Exception { - DocumentMapperParser parser = createIndex("test_doc_values", BWC_SETTINGS).mapperService().documentMapperParser(); - DocumentMapper docMapper = parser.parse(mapping); - boolean docValues = docMapper.timestampFieldMapper().fieldType().hasDocValues(); - docMapper = 
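
// Hypothetical usage, not part of the diff, of the assertConflict helper added
// above. Note that, as written, the helper merges under the literal type name
// "type" and ignores its type parameter. mappingV1/mappingV2 are assumed to be two
// _timestamp mappings that differ in an unmergeable way; the expected message is
// whatever the IllegalArgumentException from merge() contains, and passing null
// instead asserts that the second merge succeeds.
MapperService mapperService = createIndex("conflict-test").mapperService();
assertConflict(mapperService, "type", mappingV1, mappingV2,
        "mapper [_timestamp] has different [store] values");
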
parser.parse(docMapper.mappingSource().string()); - assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(docValues)); - assertAcked(client().admin().indices().prepareDelete("test_doc_values")); - } - - public void testBackcompatPath() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("path", "custom_timestamp").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 1).endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(metaData, mappingMetaData, true, "test"); - - assertThat(request.timestamp(), is("1")); - } - - public void testIncludeInObjectBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_timestamp", 2000000).endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(metaData, mappingMetaData, true, "test"); - - // _timestamp in a document never worked, so backcompat is ignoring the field - assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY"), Version.V_1_4_2), request.timestamp()); - assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_timestamp")); } public void testIncludeInObjectNotAllowed() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() @@ -799,7 +395,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData metaData = 
client().admin().cluster().prepareState().get().getState().getMetaData(); XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject(); @@ -820,7 +416,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { // // test with older versions Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build(); - DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); @@ -832,7 +428,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { // // test with 2.x - DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse(mapping); + DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData newMetaData = client().admin().cluster().prepareState().get().getState().getMetaData(); // this works with 2.x diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index efe07615532..fa27e9bcfb7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -33,9 +33,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; @@ -51,7 +50,7 @@ import static org.hamcrest.Matchers.notNullValue; public class TTLMappingTests extends ESSingleNodeTestCase { public void testSimpleDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() .field("field", "value") @@ -66,7 +65,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl").field("enabled", "yes").endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() .field("field", "value") @@ -81,25 +80,12 @@ public class TTLMappingTests extends 
ESSingleNodeTestCase { public void testDefaultValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(TTLFieldMapper.Defaults.ENABLED_STATE.enabled)); assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.stored())); assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions())); } - public void testSetValuesBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", "yes").field("store", "no") - .endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); - assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(true)); - assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(true)); // store was never serialized, so it was always lost - - } - public void testThatEnablingTTLFieldOnMergeWorks() throws Exception { String mappingWithoutTtl = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject() @@ -112,14 +98,12 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper mapperWithoutTtl = parser.parse(mappingWithoutTtl); - DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), true, false); + DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), false, false); - MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false); - - assertThat(mergeResult.hasConflicts(), equalTo(false)); - assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true)); + assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(false)); + assertThat(mapperWithTtl.TTLFieldMapper().enabled(), equalTo(true)); } public void testThatChangingTTLKeepsMapperEnabled() throws Exception { @@ -137,26 +121,27 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper initialMapper = parser.parse(mappingWithTtl); - DocumentMapper updatedMapper = parser.parse(updatedMapping); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), 
true, false); + DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); - MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false); - - assertThat(mergeResult.hasConflicts(), equalTo(false)); assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); + assertThat(updatedMapper.TTLFieldMapper().enabled(), equalTo(true)); } public void testThatDisablingTTLReportsConflict() throws Exception { String mappingWithTtl = getMappingWithTtlEnabled().string(); String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper initialMapper = parser.parse(mappingWithTtl); - DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false); - MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false); + try { + mapperService.merge("type", new CompressedXContent(mappingWithTtlDisabled), false, false); + fail(); + } catch (IllegalArgumentException e) { + // expected + } - assertThat(mergeResult.hasConflicts(), equalTo(true)); assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); } @@ -189,23 +174,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase { public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), randomBoolean(), false); } public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), randomBoolean(), false); } public void testMergeWithOnlyDefaultSet() throws Exception { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false); CompressedXContent mappingAfterMerge = 
indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @@ -216,93 +198,16 @@ public class TTLMappingTests extends ESSingleNodeTestCase { CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false); - assertFalse(mergeResult.hasConflicts()); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } - public void testThatSimulatedMergingLeavesStateUntouched() throws Exception { - //check if default ttl changed when simulate set to true - XContentBuilder mappingWithTtl = getMappingWithTtlEnabled("6d"); - IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl); - CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); - XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false); - assertFalse(mergeResult.hasConflicts()); - // make sure simulate flag actually worked - no mappings applied - CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); - - client().admin().indices().prepareDelete("testindex").get(); - // check if enabled changed when simulate set to true - XContentBuilder mappingWithoutTtl = getMappingWithTtlDisabled(); - indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); - mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); - XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled(); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false); - assertFalse(mergeResult.hasConflicts()); - // make sure simulate flag actually worked - no mappings applied - mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); - - client().admin().indices().prepareDelete("testindex").get(); - // check if enabled changed when simulate set to true - mappingWithoutTtl = getMappingWithTtlDisabled("6d"); - indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", 
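
// A sketch, not part of the diff, of the verification pattern these TTL tests now
// share: after a non-simulated MapperService.merge, the applied mapping is read
// back as CompressedXContent and compared against the expected JSON. The update
// and expected string are taken from the default-TTL case in the hunk above.
indexService.mapperService().merge("type",
        new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false);
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent(
        "{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
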
mappingWithoutTtl); - mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); - mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false); - assertFalse(mergeResult.hasConflicts()); - // make sure simulate flag actually worked - no mappings applied - mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); - - client().admin().indices().prepareDelete("testindex").get(); - // check if switching simulate flag off works - mappingWithoutTtl = getMappingWithTtlDisabled("6d"); - indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); - mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false); - assertFalse(mergeResult.hasConflicts()); - // make sure simulate flag actually worked - mappings applied - mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); - - client().admin().indices().prepareDelete("testindex").get(); - // check if switching simulate flag off works if nothing was applied in the beginning - indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); - mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false); - assertFalse(mergeResult.hasConflicts()); - // make sure simulate flag actually worked - mappings applied - mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); - - } - - public void testIncludeInObjectBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("enabled", true).endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_ttl", "2d").endObject(); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); - - // _ttl in a document never worked, so backcompat is ignoring the field - assertNull(request.ttl()); - assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_ttl")); - } - public void testIncludeInObjectNotAllowed() throws Exception { String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java index 26d710b137f..e5d6431492d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseDocumentTypeLevelsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.typelevels; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; @@ -33,7 +34,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testNoLevel() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -51,7 +52,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testTypeLevel() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject().startObject("type") @@ -69,7 +70,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testNoLevelWithFieldTypeAsValue() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -89,7 +90,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testTypeLevelWithFieldTypeAsValue() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper 
defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject().startObject("type") @@ -109,7 +110,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testNoLevelWithFieldTypeAsObject() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -129,7 +130,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testTypeLevelWithFieldTypeAsObject() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject().startObject("type") @@ -149,7 +150,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject().startObject("type") @@ -169,7 +170,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject().startObject("type") @@ -189,7 +190,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", 
XContentFactory.jsonBuilder() .startObject() @@ -210,7 +211,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase { public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject().startObject("type") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseMappingTypeLevelTests.java b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseMappingTypeLevelTests.java index d99efee6824..1d849d50932 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseMappingTypeLevelTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/typelevels/ParseMappingTypeLevelTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.typelevels; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; @@ -34,11 +35,7 @@ public class ParseMappingTypeLevelTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper mapper = parser.parse("type", mapping); - assertThat(mapper.type(), equalTo("type")); - assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true)); - - mapper = parser.parse(mapping); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertThat(mapper.type(), equalTo("type")); assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true)); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index abf5f4819cd..a53295d7fea 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -77,9 +76,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); // simulate like in MetaDataMappingService#putMapping - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false); - // assure we have no conflicts - 
assertThat(mergeResult.buildConflicts().length, equalTo(0)); + indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), false, false); // make sure mappings applied CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string())); @@ -101,9 +98,12 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in MetaDataMappingService#putMapping - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false); - // assure we have conflicts - assertThat(mergeResult.buildConflicts().length, equalTo(1)); + try { + indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), true, false); + fail(); + } catch (IllegalArgumentException e) { + // expected + } // make sure simulate flag actually worked - no mappings applied CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate)); @@ -123,14 +123,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { mapperService.merge("type", new CompressedXContent(update.string()), false, false); fail(); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); + assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); } try { mapperService.merge("type", new CompressedXContent(update.string()), false, false); fail(); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); + assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); } assertTrue(mapperService.documentMapper("type").mapping().root().getMapper("foo") instanceof LongFieldMapper); @@ -202,21 +202,49 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { assertNull(mapperService.documentMapper("type2").mapping().root().getMapper("foo")); } - public void testIndexFieldParsingBackcompat() throws IOException { - IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build()); - XContentBuilder indexMapping = XContentFactory.jsonBuilder(); - boolean enabled = randomBoolean(); - indexMapping.startObject() - .startObject("type") - .startObject("_index") - .field("enabled", enabled) - .endObject() - .endObject() - .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); - assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled)); - documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); - assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled)); + public void testReuseMetaField() throws IOException { + XContentBuilder mapping = 
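The UpdateMappingTests hunks above also capture a behavioral change: MapperService.merge no longer returns a MergeResult whose buildConflicts() callers must inspect; a conflicting update now throws IllegalArgumentException and leaves the existing mapping in place. A sketch of the new convention, assuming the indexService and mappingUpdate builder from the surrounding test:

```java
// simulate like in MetaDataMappingService#putMapping; a conflict now
// surfaces as an exception instead of a populated MergeResult
try {
    indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), true, false);
    fail("expected the conflicting mapping update to be rejected");
} catch (IllegalArgumentException e) {
    // expected; documentMapper("type").mappingSource() is unchanged
}
```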
XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("_id").field("type", "string").endObject() + .endObject().endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.settingsBuilder().build()).mapperService(); + + try { + mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + } + + try { + mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + } + } + + public void testReuseMetaFieldBackCompat() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("_id").field("type", "string").endObject() + .endObject().endObject().endObject(); + // the logic is different for 2.x indices since they record some meta mappers (including _id) + // in the root object + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0).build(); + MapperService mapperService = createIndex("test", settings).mapperService(); + + try { + mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + } + + try { + mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + } } public void testTimestampParsing() throws IOException { @@ -227,10 +255,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { .startObject("type") .startObject("_timestamp") .field("enabled", enabled) - .field("store", true) - .startObject("fielddata") - .field("format", "doc_values") - .endObject() .endObject() .endObject() .endObject(); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index aa97d722737..8faa2dac524 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; - import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -47,6 +46,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.geo.builders.ShapeBuilderRegistry; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; @@ -60,7 +60,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import 
org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; @@ -82,8 +86,13 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.script.*; +import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script.ScriptParseException; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptContextRegistry; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.ScriptModule; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -103,10 +112,20 @@ import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.ExecutionException; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> extends ESTestCase { @@ -191,6 +210,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> // skip services bindQueryParsersExtension(); bindMapperExtension(); + bind(ShapeBuilderRegistry.class).asEagerSingleton(); } }, new ScriptModule(settings) { diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 5d94a9a8bca..1c407fbaa0e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -19,18 +19,27 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; - import org.hamcrest.Matchers; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; -import static
org.elasticsearch.index.query.QueryBuilders.*; +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; @@ -245,6 +254,24 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase { /** @@ -145,26 +151,26 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java index 6a952cebc0c..a853660495d 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.GeoPointDistanceRangeQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.util.GeoDistanceUtils; import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.SloppyMath; import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; @@ -55,7 +55,7 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase public void testFromJson() throws IOException { String json = - "{\n" + - " \"geohash_cell\" : {\n" + - " \"neighbors\" : true,\n" + - " \"precision\" : 3,\n" + - " \"pin\" : \"t4mk70fgk067\",\n" + - " \"boost\" : 1.0\n" + - " }\n" + + "{\n" + + " \"geohash_cell\" : {\n" + + " \"neighbors\" : true,\n" + + " \"precision\" : 3,\n" + + " \"pin\" : \"t4mk70fgk067\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + "}"; GeohashCellQuery.Builder parsed = (GeohashCellQuery.Builder) parseQuery(json); checkGeneratedJson(json, parsed); diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java index 51da0fc3996..f2b3a1a5026 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -21,19 +21,18 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.fasterxml.jackson.core.JsonParseException; - import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.index.mapper.Uid; @@ -50,10 +49,9 @@ import org.elasticsearch.test.TestSearchContext; import java.io.IOException; import java.util.Collections; -import static org.hamcrest.Matchers.containsString; - import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.containsString; public class HasChildQueryBuilderTests extends AbstractQueryTestCase { protected static final String PARENT_TYPE = "parent"; diff --git a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java index b391930c32c..5dc1d66cd5f 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.fasterxml.jackson.core.JsonParseException; - import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchParseException; @@ -45,10 +44,9 @@ import org.elasticsearch.test.TestSearchContext; import java.io.IOException; import java.util.Arrays; -import static org.hamcrest.Matchers.containsString; - import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.containsString; public class HasParentQueryBuilderTests extends AbstractQueryTestCase { protected static final String PARENT_TYPE = "parent"; @@ -245,20 +243,20 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase { private List randomTerms; @@ -260,11 +260,11 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase { @Override - public MappedFieldType.Names getFieldNames() { - return new MappedFieldType.Names("test"); + public String getFieldName() { + return "test"; } @Override @@ -167,8 +170,8 @@ public class FunctionScoreTests extends ESTestCase { } @Override - public MappedFieldType.Names getFieldNames() { - return new MappedFieldType.Names("test"); + public String getFieldName() { + return "test"; } @Override @@ -355,11 +358,11 @@ public class FunctionScoreTests extends ESTestCase { // now test all together functionExplanation = getFiltersFunctionScoreExplanation(searcher - , RANDOM_SCORE_FUNCTION - , FIELD_VALUE_FACTOR_FUNCTION - , GAUSS_DECAY_FUNCTION - , EXP_DECAY_FUNCTION - , LIN_DECAY_FUNCTION + , RANDOM_SCORE_FUNCTION + , FIELD_VALUE_FACTOR_FUNCTION + , GAUSS_DECAY_FUNCTION + , EXP_DECAY_FUNCTION + , LIN_DECAY_FUNCTION ); checkFiltersFunctionScoreExplanation(functionExplanation, "random score function (seed: 0)", 0); @@ -395,7 +398,7 @@ public class FunctionScoreTests extends ESTestCase { FiltersFunctionScoreQuery.FilterFunction[] filterFunctions = new FiltersFunctionScoreQuery.FilterFunction[scoreFunctions.length]; for (int i = 0; i < scoreFunctions.length; i++) { filterFunctions[i] = new FiltersFunctionScoreQuery.FilterFunction( - new TermQuery(TERM), scoreFunctions[i]); + new TermQuery(TERM), scoreFunctions[i]); } return new FiltersFunctionScoreQuery(new TermQuery(TERM), scoreMode, filterFunctions, Float.MAX_VALUE, Float.MAX_VALUE * -1, combineFunction); } @@ -560,4 +563,183 @@ public class FunctionScoreTests extends ESTestCase { float score = topDocsWithWeights.scoreDocs[0].score; assertThat(score, equalTo(2.0f)); } -} \ No newline at end of file + + 
public void testMinScoreExplain() throws IOException { + Query query = new MatchAllDocsQuery(); + Explanation queryExpl = searcher.explain(query, 0); + + FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, 0f, null, Float.POSITIVE_INFINITY); + Explanation fsqExpl = searcher.explain(fsq, 0); + assertTrue(fsqExpl.isMatch()); + assertEquals(queryExpl.getValue(), fsqExpl.getValue(), 0f); + assertEquals(queryExpl.getDescription(), fsqExpl.getDescription()); + + fsq = new FunctionScoreQuery(query, null, 10f, null, Float.POSITIVE_INFINITY); + fsqExpl = searcher.explain(fsq, 0); + assertFalse(fsqExpl.isMatch()); + assertEquals("Score value is too low, expected at least 10.0 but got 1.0", fsqExpl.getDescription()); + + FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 0f, CombineFunction.MULTIPLY); + Explanation ffsqExpl = searcher.explain(ffsq, 0); + assertTrue(ffsqExpl.isMatch()); + assertEquals(queryExpl.getValue(), ffsqExpl.getValue(), 0f); + assertEquals(queryExpl.getDescription(), ffsqExpl.getDescription()); + + ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 10f, CombineFunction.MULTIPLY); + ffsqExpl = searcher.explain(ffsq, 0); + assertFalse(ffsqExpl.isMatch()); + assertEquals("Score value is too low, expected at least 10.0 but got 1.0", ffsqExpl.getDescription()); + } + + public void testPropagatesApproximations() throws IOException { + Query query = new RandomApproximationQuery(new MatchAllDocsQuery(), random()); + IndexSearcher searcher = newSearcher(reader); + searcher.setQueryCache(null); // otherwise we could get a cached entry that does not have approximations + + FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, null, null, Float.POSITIVE_INFINITY); + for (boolean needsScores : new boolean[] {true, false}) { + Weight weight = searcher.createWeight(fsq, needsScores); + Scorer scorer = weight.scorer(reader.leaves().get(0)); + assertNotNull(scorer.twoPhaseIterator()); + } + + FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, null, CombineFunction.MULTIPLY); + for (boolean needsScores : new boolean[] {true, false}) { + Weight weight = searcher.createWeight(ffsq, needsScores); + Scorer scorer = weight.scorer(reader.leaves().get(0)); + assertNotNull(scorer.twoPhaseIterator()); + } + } + + public void testFunctionScoreHashCodeAndEquals() { + Float minScore = randomBoolean() ? null : 1.0f; + CombineFunction combineFunction = randomFrom(CombineFunction.values()); + float maxBoost = randomBoolean() ? Float.POSITIVE_INFINITY : randomFloat(); + ScoreFunction function = randomBoolean() ? 
null : new ScoreFunction(combineFunction) { + @Override + public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException { + return null; + } + + @Override + public boolean needsScores() { + return false; + } + @Override + protected boolean doEquals(ScoreFunction other) { + return other == this; + } + }; + + FunctionScoreQuery q = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost); + FunctionScoreQuery q1 = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost); + assertEquals(q, q); + assertEquals(q.hashCode(), q.hashCode()); + assertEquals(q, q1); + assertEquals(q.hashCode(), q1.hashCode()); + + FunctionScoreQuery diffQuery = new FunctionScoreQuery(new TermQuery(new Term("foo", "baz")), function, minScore, combineFunction, maxBoost); + FunctionScoreQuery diffMinScore = new FunctionScoreQuery(q.getSubQuery(), function, minScore == null ? 1.0f : null, combineFunction, maxBoost); + ScoreFunction otherFunction = function == null ? new ScoreFunction(combineFunction) { + @Override + public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException { + return null; + } + + @Override + public boolean needsScores() { + return false; + } + + @Override + protected boolean doEquals(ScoreFunction other) { + return other == this; + } + + } : null; + FunctionScoreQuery diffFunction = new FunctionScoreQuery(q.getSubQuery(), otherFunction, minScore, combineFunction, maxBoost); + FunctionScoreQuery diffMaxBoost = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost == 1.0f ? 0.9f : 1.0f); + q1.setBoost(3.0f); + FunctionScoreQuery[] queries = new FunctionScoreQuery[] { + diffFunction, + diffMinScore, + diffQuery, + q, + q1, + diffMaxBoost + }; + final int numIters = randomIntBetween(20, 100); + for (int i = 0; i < numIters; i++) { + FunctionScoreQuery left = randomFrom(queries); + FunctionScoreQuery right = randomFrom(queries); + if (left == right) { + assertEquals(left, right); + assertEquals(left.hashCode(), right.hashCode()); + } else { + assertNotEquals(left + " == " + right, left, right); + } + } + + } + + public void testFilterFunctionScoreHashCodeAndEquals() { + ScoreMode mode = randomFrom(ScoreMode.values()); + CombineFunction combineFunction = randomFrom(CombineFunction.values()); + ScoreFunction scoreFunction = new ScoreFunction(combineFunction) { + @Override + public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException { + return null; + } + + @Override + public boolean needsScores() { + return false; + } + + @Override + protected boolean doEquals(ScoreFunction other) { + return other == this; + } + }; + Float minScore = randomBoolean() ? null : 1.0f; + Float maxBoost = randomBoolean() ? 
Float.POSITIVE_INFINITY : randomFloat(); + + FilterFunction function = new FilterFunction(new TermQuery(new Term("filter", "query")), scoreFunction); + FiltersFunctionScoreQuery q = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore, combineFunction); + FiltersFunctionScoreQuery q1 = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore, combineFunction); + assertEquals(q, q); + assertEquals(q.hashCode(), q.hashCode()); + assertEquals(q, q1); + assertEquals(q.hashCode(), q1.hashCode()); + FiltersFunctionScoreQuery diffCombineFunc = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore, combineFunction == CombineFunction.AVG ? CombineFunction.MAX : CombineFunction.AVG); + FiltersFunctionScoreQuery diffQuery = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "baz")), mode, new FilterFunction[] {function}, maxBoost, minScore, combineFunction); + FiltersFunctionScoreQuery diffMode = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode == ScoreMode.AVG ? ScoreMode.FIRST : ScoreMode.AVG, new FilterFunction[] {function}, maxBoost, minScore, combineFunction); + FiltersFunctionScoreQuery diffMaxBoost = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost == 1.0f ? 0.9f : 1.0f, minScore, combineFunction); + FiltersFunctionScoreQuery diffMinScore = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore == null ? 0.9f : null, combineFunction); + FilterFunction otherFunc = new FilterFunction(new TermQuery(new Term("filter", "other_query")), scoreFunction); + FiltersFunctionScoreQuery diffFunc = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, randomBoolean() ? 
new FilterFunction[] {function, otherFunc} : new FilterFunction[] {otherFunc}, maxBoost, minScore, combineFunction); + q1.setBoost(3.0f); + + FiltersFunctionScoreQuery[] queries = new FiltersFunctionScoreQuery[] { + diffQuery, + diffMaxBoost, + diffMinScore, + diffMode, + diffFunc, + q, + q1, + diffCombineFunc + }; + final int numIters = randomIntBetween(20, 100); + for (int i = 0; i < numIters; i++) { + FiltersFunctionScoreQuery left = randomFrom(queries); + FiltersFunctionScoreQuery right = randomFrom(queries); + if (left == right) { + assertEquals(left, right); + assertEquals(left.hashCode(), right.hashCode()); + } else { + assertNotEquals(left + " == " + right, left, right); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/support/QueryInnerHitsTests.java b/core/src/test/java/org/elasticsearch/index/query/support/QueryInnerHitsTests.java index 2c4e3171932..efcf17b4af5 100644 --- a/core/src/test/java/org/elasticsearch/index/query/support/QueryInnerHitsTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/support/QueryInnerHitsTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 1aa0978e6c8..236198261d7 100644 --- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -20,7 +20,12 @@ package org.elasticsearch.index.search; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; diff --git a/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java index c283150c30d..0bfeae67560 100644 --- a/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.search.geo; import com.spatial4j.core.context.SpatialContext; import com.spatial4j.core.distance.DistanceUtils; - import org.apache.lucene.spatial.prefix.tree.Cell; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java index 76f20afc696..b7f2dd09f3c 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java @@ -22,7 +22,13 @@ import 
org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FieldFilterLeafReader; +import org.apache.lucene.index.FilterDirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 6159147a399..6ab35e773a8 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -46,7 +46,13 @@ import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.AllocationId; +import org.elasticsearch.cluster.routing.RestoreSource; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingHelper; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -63,6 +69,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; @@ -71,16 +78,23 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.indexing.IndexingOperationListener; import org.elasticsearch.index.indexing.ShardIndexingService; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapping; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveryState; -import org.elasticsearch.test.*; +import org.elasticsearch.test.DummyShardLock; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.FieldMaskingReader; +import org.elasticsearch.test.IndexSettingsModule; +import 
org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.nio.file.Files; @@ -94,11 +108,16 @@ import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.cluster.metadata.IndexMetaData.*; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.equalTo; /** @@ -133,7 +152,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardId id = new ShardId("foo", 1); long version = between(1, Integer.MAX_VALUE / 2); boolean primary = randomBoolean(); - AllocationId allocationId = randomAllocationId(); + AllocationId allocationId = randomBoolean() ? null : randomAllocationId(); ShardStateMetaData state1 = new ShardStateMetaData(version, primary, "foo", allocationId); write(state1, env.availableShardPaths(id)); ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(id)); @@ -288,7 +307,8 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public void testShardStateMetaHashCodeEquals() { - ShardStateMetaData meta = new ShardStateMetaData(randomLong(), randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), randomAllocationId()); + AllocationId allocationId = randomBoolean() ? null : randomAllocationId(); + ShardStateMetaData meta = new ShardStateMetaData(randomLong(), randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), allocationId); assertEquals(meta, new ShardStateMetaData(meta.version, meta.primary, meta.indexUUID, meta.allocationId)); assertEquals(meta.hashCode(), new ShardStateMetaData(meta.version, meta.primary, meta.indexUUID, meta.allocationId).hashCode()); @@ -299,7 +319,8 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertFalse(meta.equals(new ShardStateMetaData(meta.version, !meta.primary, meta.indexUUID + "foo", randomAllocationId()))); Set hashCodes = new HashSet<>(); for (int i = 0; i < 30; i++) { // just a sanity check that we impl hashcode - meta = new ShardStateMetaData(randomLong(), randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), randomAllocationId()); + allocationId = randomBoolean() ? 
null : randomAllocationId(); + meta = new ShardStateMetaData(randomLong(), randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), allocationId); hashCodes.add(meta.hashCode()); } assertTrue("more than one unique hashcode expected but got: " + hashCodes.size(), hashCodes.size() > 1); @@ -371,35 +392,35 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); IndexShard shard = test.getShardOrNull(0); - setDurability(shard, Translog.Durabilty.REQUEST); + setDurability(shard, Translog.Durability.REQUEST); assertFalse(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.ASYNC); + setDurability(shard, Translog.Durability.ASYNC); client().prepareIndex("test", "bar", "2").setSource("{}").get(); assertTrue(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.REQUEST); + setDurability(shard, Translog.Durability.REQUEST); client().prepareDelete("test", "bar", "1").get(); assertFalse(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.ASYNC); + setDurability(shard, Translog.Durability.ASYNC); client().prepareDelete("test", "bar", "2").get(); assertTrue(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.REQUEST); + setDurability(shard, Translog.Durability.REQUEST); assertNoFailures(client().prepareBulk() .add(client().prepareIndex("test", "bar", "3").setSource("{}")) .add(client().prepareDelete("test", "bar", "1")).get()); assertFalse(shard.getEngine().getTranslog().syncNeeded()); - setDurability(shard, Translog.Durabilty.ASYNC); + setDurability(shard, Translog.Durability.ASYNC); assertNoFailures(client().prepareBulk() .add(client().prepareIndex("test", "bar", "4").setSource("{}")) .add(client().prepareDelete("test", "bar", "3")).get()); - setDurability(shard, Translog.Durabilty.REQUEST); + setDurability(shard, Translog.Durability.REQUEST); assertTrue(shard.getEngine().getTranslog().syncNeeded()); } - private void setDurability(IndexShard shard, Translog.Durabilty durabilty) { - client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, durabilty.name()).build()).get(); - assertEquals(durabilty, shard.getTranslogDurability()); + private void setDurability(IndexShard shard, Translog.Durability durability) { + client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, durability.name()).build()).get(); + assertEquals(durability, shard.getTranslogDurability()); } public void testMinimumCompatVersion() { @@ -671,13 +692,13 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public void testMaybeFlush() throws Exception { - createIndex("test", settingsBuilder().put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.REQUEST).build()); + createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.REQUEST).build()); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - 
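Two renames run through the IndexShardTests hunks above: the misspelled Translog.Durabilty enum becomes Translog.Durability, and the durability setting key moves from TranslogConfig to IndexSettings. For reference, the setDurability helper as the diff leaves it:

```java
private void setDurability(IndexShard shard, Translog.Durability durability) {
    // IndexSettings.INDEX_TRANSLOG_DURABILITY replaces the old TranslogConfig key
    client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex())
            .setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, durability.name()).build())
            .get();
    assertEquals(durability, shard.getTranslogDurability());
}
```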
client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); @@ -693,8 +714,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { shard.getEngine().getTranslog().sync(); long size = shard.getEngine().getTranslog().sizeInBytes(); logger.info("--> current translog size: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1000) - .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(size, ByteSizeUnit.BYTES)) + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(size, ByteSizeUnit.BYTES)) .build()).get(); client().prepareDelete("test", "test", "2").get(); logger.info("--> translog size after delete: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); @@ -712,7 +732,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService("test"); final IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); final AtomicBoolean running = new AtomicBoolean(true); @@ -965,7 +985,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexShard newShard = reinitWithWrapper(indexService, shard, wrapper); try { // test global ordinals are evicted - MappedFieldType foo = newShard.mapperService().indexName("foo"); + MappedFieldType foo = newShard.mapperService().fullName("foo"); IndexFieldData.Global ifd = shard.indexFieldDataService().getForField(foo); FieldDataStats before = shard.fieldData().stats("foo"); assertThat(before.getMemorySizeInBytes(), equalTo(0l)); diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java index 449fd1df96b..c3a2d65748c 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java @@ -40,7 +40,11 @@ import java.nio.file.Path; import 
java.nio.file.attribute.FileAttributeView; import java.nio.file.attribute.FileStoreAttributeView; import java.nio.file.spi.FileSystemProvider; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** Separate test class from ShardPathTests because we need static (BeforeClass) setup to install mock filesystems... */ public class NewPathForShardTests extends ESTestCase { @@ -71,7 +75,7 @@ public class NewPathForShardTests extends ESTestCase { /** Mock file system that fakes usable space for each FileStore */ static class MockUsableSpaceFileSystemProvider extends FilterFileSystemProvider { - + public MockUsableSpaceFileSystemProvider(FileSystem inner) { super("mockusablespace://", inner); final List fileStores = new ArrayList<>(); @@ -98,7 +102,7 @@ public class NewPathForShardTests extends ESTestCase { public MockFileStore(String desc) { this.desc = desc; } - + @Override public String type() { return "mock"; @@ -204,7 +208,7 @@ public class NewPathForShardTests extends ESTestCase { // had the most free space, never using the other drive unless new shards arrive // after the first shards started using storage: assertNotEquals(result1.getDataPath(), result2.getDataPath()); - + nodeEnv.close(); } } diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java index 3422c66a3e7..a59dcb49aca 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java @@ -21,7 +21,10 @@ package org.elasticsearch.index.shard; import org.apache.lucene.document.Document; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.CompositeReaderContext; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index 965916284a3..dd5ca6bcc51 100644 --- a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -19,22 +19,28 @@ package org.elasticsearch.index.similarity; +import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.AfterEffectL; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.BasicModelG; import org.apache.lucene.search.similarities.DFRSimilarity; -import org.apache.lucene.search.similarities.DefaultSimilarity; import org.apache.lucene.search.similarities.DistributionSPL; import org.apache.lucene.search.similarities.IBSimilarity; import org.apache.lucene.search.similarities.LMDirichletSimilarity; import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity; import org.apache.lucene.search.similarities.LambdaTTF; import org.apache.lucene.search.similarities.NormalizationH2; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; @@ -44,44 +50,45 @@ import static org.hamcrest.CoreMatchers.instanceOf; public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveDefaultSimilarities() { SimilarityService similarityService = createIndex("foo").similarityService(); - assertThat(similarityService.getSimilarity("default").get(), instanceOf(DefaultSimilarity.class)); + assertThat(similarityService.getSimilarity("classic").get(), instanceOf(ClassicSimilarity.class)); assertThat(similarityService.getSimilarity("BM25").get(), instanceOf(BM25Similarity.class)); + assertThat(similarityService.getSimilarity("default"), equalTo(null)); } - public void testResolveSimilaritiesFromMapping_default() throws IOException { + public void testResolveSimilaritiesFromMapping_classic() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "default") - .put("index.similarity.my_similarity.discount_overlaps", false) - .build(); + .put("index.similarity.my_similarity.type", "classic") + .put("index.similarity.my_similarity.discount_overlaps", false) + .build(); IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DefaultSimilarityProvider.class)); + DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(ClassicSimilarityProvider.class)); - DefaultSimilarity similarity = (DefaultSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + ClassicSimilarity similarity = (ClassicSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } public void testResolveSimilaritiesFromMapping_bm25() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "BM25") - .put("index.similarity.my_similarity.k1", 2.0f) - 
.put("index.similarity.my_similarity.b", 1.5f) - .put("index.similarity.my_similarity.discount_overlaps", false) - .build(); + .put("index.similarity.my_similarity.type", "BM25") + .put("index.similarity.my_similarity.k1", 2.0f) + .put("index.similarity.my_similarity.b", 1.5f) + .put("index.similarity.my_similarity.discount_overlaps", false) + .build(); IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(BM25SimilarityProvider.class)); BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); @@ -92,20 +99,20 @@ public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveSimilaritiesFromMapping_DFR() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "DFR") - .put("index.similarity.my_similarity.basic_model", "g") - .put("index.similarity.my_similarity.after_effect", "l") - .put("index.similarity.my_similarity.normalization", "h2") - .put("index.similarity.my_similarity.normalization.h2.c", 3f) - .build(); + .put("index.similarity.my_similarity.type", "DFR") + .put("index.similarity.my_similarity.basic_model", "g") + .put("index.similarity.my_similarity.after_effect", "l") + .put("index.similarity.my_similarity.normalization", "h2") + .put("index.similarity.my_similarity.normalization.h2.c", 3f) + .build(); IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DFRSimilarityProvider.class)); DFRSimilarity similarity = (DFRSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); @@ -117,20 +124,20 @@ public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveSimilaritiesFromMapping_IB() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "IB") - .put("index.similarity.my_similarity.distribution", "spl") - .put("index.similarity.my_similarity.lambda", "ttf") - 
.put("index.similarity.my_similarity.normalization", "h2") - .put("index.similarity.my_similarity.normalization.h2.c", 3f) - .build(); + .put("index.similarity.my_similarity.type", "IB") + .put("index.similarity.my_similarity.distribution", "spl") + .put("index.similarity.my_similarity.lambda", "ttf") + .put("index.similarity.my_similarity.normalization", "h2") + .put("index.similarity.my_similarity.normalization.h2.c", 3f) + .build(); IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(IBSimilarityProvider.class)); IBSimilarity similarity = (IBSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); @@ -142,17 +149,17 @@ public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "LMDirichlet") - .put("index.similarity.my_similarity.mu", 3000f) - .build(); + .put("index.similarity.my_similarity.type", "LMDirichlet") + .put("index.similarity.my_similarity.mu", 3000f) + .build(); IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMDirichletSimilarityProvider.class)); LMDirichletSimilarity similarity = (LMDirichletSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); @@ -161,20 +168,63 @@ public class SimilarityTests extends ESSingleNodeTestCase { public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); Settings indexSettings = Settings.settingsBuilder() - .put("index.similarity.my_similarity.type", "LMJelinekMercer") - .put("index.similarity.my_similarity.lambda", 0.7f) - .build(); + .put("index.similarity.my_similarity.type", "LMJelinekMercer") + .put("index.similarity.my_similarity.lambda", 0.7f) + .build(); IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse(mapping); + DocumentMapper 
documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMJelinekMercerSimilarityProvider.class)); LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getLambda(), equalTo(0.7f)); } + + public void testResolveSimilaritiesFromMapping_Unknown() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "unknown_similarity").endObject() + .endObject() + .endObject().endObject().string(); + + IndexService indexService = createIndex("foo"); + try { + indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + fail("Expected MapperParsingException"); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), equalTo("Unknown Similarity type [unknown_similarity] for [field1]")); + } + } + + public void testSimilarityDefaultBackCompat() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1") + .field("similarity", "default") + .field("type", "string") + .endObject() + .endObject() + .endObject().string(); + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_2_0)) + .build(); + + DocumentMapperParser parser = createIndex("test_v2.x", settings).mapperService().documentMapperParser(); + DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(ClassicSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().name(), equalTo("classic")); + + parser = createIndex("test_v3.x").mapperService().documentMapperParser(); + try { + parser.parse("type", new CompressedXContent(mapping)); + fail("Expected MapperParsingException"); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), equalTo("Unknown Similarity type [default] for [field1]")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java index 38fc17c777e..4bd8ba9cb3e 100644 --- a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java +++ b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.snapshots.blobstore; import com.carrotsearch.randomizedtesting.generators.RandomInts; - import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 53d18eaef81..1dfe8514502 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -33,7 +33,11 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import
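The SimilarityTests changes above follow Lucene's rename of DefaultSimilarity to ClassicSimilarity: the built-in key is now "classic", "default" no longer resolves (and is rejected when parsing mappings for current-version indices), and custom similarities are still declared through index.similarity.* settings. A condensed sketch of the pattern these tests exercise, where mapping is the field1-with-custom-similarity mapping string each test builds:

```java
// Register a similarity named "my_similarity" via index settings, then
// resolve it through a field mapping that references it by name.
Settings indexSettings = Settings.settingsBuilder()
        .put("index.similarity.my_similarity.type", "classic")
        .put("index.similarity.my_similarity.discount_overlaps", false)
        .build();
IndexService indexService = createIndex("foo", indexSettings);
DocumentMapper documentMapper = indexService.mapperService().documentMapperParser()
        .parse("type", new CompressedXContent(mapping));
ClassicSimilarity similarity = (ClassicSimilarity) documentMapper.mappers()
        .getMapper("field1").fieldType().similarity().get();
assertThat(similarity.getDiscountOverlaps(), equalTo(false));
```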
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.Nullable; @@ -43,8 +47,13 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.gateway.PrimaryShardAllocator; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardState; +import org.elasticsearch.index.shard.MergePolicyConfig; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.recovery.RecoveryFileChunkRequest; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.recovery.RecoveryTarget; @@ -69,7 +78,14 @@ import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -78,8 +94,16 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class CorruptedFileIT extends ESIntegTestCase { @@ -90,9 +114,8 @@ public class CorruptedFileIT extends ESIntegTestCase { // and we need to make sure primaries are not just trashed if we don't have replicas .put(super.nodeSettings(nodeOrdinal)) // speed up recoveries - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, 10) - .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 10) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 5) +
.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 5) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 5) .build(); } @@ -119,7 +142,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1") .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files .put("indices.recovery.concurrent_streams", 10) )); ensureGreen(); @@ -224,7 +247,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files .put("indices.recovery.concurrent_streams", 10) )); ensureGreen(); @@ -450,7 +473,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") // no replicas for this test .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files .put("indices.recovery.concurrent_streams", 10) )); ensureGreen(); @@ -505,7 +528,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, cluster().numDataNodes() - 1) .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files + .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files .put("indices.recovery.concurrent_streams", 10) )); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index 653a7d04e9e..d712d846c47 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.store; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ 
-28,15 +27,17 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.engine.MockEngineSupport; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import java.io.IOException; @@ -51,6 +52,7 @@ import java.util.Collection; import java.util.List; import java.util.Set; import java.util.TreeSet; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -69,7 +71,6 @@ public class CorruptedTranslogIT extends ESIntegTestCase { return pluginList(MockTransportService.TestPlugin.class); } - @TestLogging("index.translog:TRACE,index.gateway:TRACE") public void testCorruptTranslogFiles() throws Exception { internalCluster().startNodesAsync(1, Settings.EMPTY).get(); @@ -79,7 +80,6 @@ public class CorruptedTranslogIT extends ESIntegTestCase { .put("index.refresh_interval", "-1") .put(MockEngineSupport.FLUSH_ON_CLOSE_RATIO, 0.0d) // never flush - always recover from translog .put(IndexShard.INDEX_FLUSH_ON_CLOSE, false) // never flush - always recover from translog - .put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL, "1s") // fsync the translog every second )); ensureYellow(); @@ -97,14 +97,13 @@ public class CorruptedTranslogIT extends ESIntegTestCase { // Restart the single node internalCluster().fullRestart(); - // node needs time to start recovery and discover the translog corruption - Thread.sleep(1000); - enableTranslogFlush("test"); + client().admin().cluster().prepareHealth().setWaitForYellowStatus().setTimeout(new TimeValue(1000, TimeUnit.MILLISECONDS)).setWaitForEvents(Priority.LANGUID).get(); try { client().prepareSearch("test").setQuery(matchAllQuery()).get(); fail("all shards should be failed due to a corrupted translog"); } catch (SearchPhaseExecutionException e) { + e.printStackTrace(); // Good, all shards should be failed because there is only a // single shard and its translog is corrupt } @@ -168,4 +167,16 @@ public class CorruptedTranslogIT extends ESIntegTestCase { } assertThat("no file corrupted", fileToCorrupt, notNullValue()); } + + /** Disables translog flushing for the specified index */ + private static void disableTranslogFlush(String index) { + Settings settings = Settings.builder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)).build(); + client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); + } + + /** Enables translog flushing for the specified index */ + private static void enableTranslogFlush(String index) { + Settings settings = Settings.builder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)).build(); + 
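Aside: with the boolean index.translog.disable_flush setting gone, these helpers toggle flushing by moving the size threshold instead: 1 PB is effectively "never", while 512 MB restores the shipped default. A typical call sequence in a test, sketched under the assumption of an index named "test"; indexSomeDocs is a hypothetical stand-in for the test's indexing loop:

    disableTranslogFlush("test"); // threshold -> 1 PB: no size-triggered flush can fire
    indexSomeDocs();              // hypothetical: index documents while flushing is off
    enableTranslogFlush("test");  // threshold -> 512 MB, the regular default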
client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); + } } diff --git a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java index f5b7fc250aa..234de11b516 100644 --- a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.discovery.Discovery; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java index ee3ad6b8b29..f27d9ddcd2b 100644 --- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java @@ -19,7 +19,12 @@ package org.elasticsearch.index.store; import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FileSwitchDirectory; +import org.apache.lucene.store.MMapDirectory; +import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.NoLockFactory; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; diff --git a/core/src/test/java/org/elasticsearch/index/store/LegacyVerificationTests.java b/core/src/test/java/org/elasticsearch/index/store/LegacyVerificationTests.java index 3d9c4f732bf..c65a02ed692 100644 --- a/core/src/test/java/org/elasticsearch/index/store/LegacyVerificationTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/LegacyVerificationTests.java @@ -19,50 +19,50 @@ package org.elasticsearch.index.store; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexOutput; +import org.elasticsearch.test.ESTestCase; + import java.nio.charset.StandardCharsets; import java.util.zip.Adler32; -import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.IOContext; -import org.apache.lucene.store.Directory; -import org.elasticsearch.test.ESTestCase; - -/** +/** * Simple tests for LegacyVerification (old segments) - * @deprecated remove this test when support for lucene 4.x - * segments is not longer needed. + * @deprecated remove this test when support for lucene 4.x + * segments is no longer needed.
*/ @Deprecated public class LegacyVerificationTests extends ESTestCase { - + public void testAdler32() throws Exception { Adler32 expected = new Adler32(); byte bytes[] = "abcdefgh".getBytes(StandardCharsets.UTF_8); expected.update(bytes); String expectedString = Store.digestToString(expected.getValue()); - + Directory dir = newDirectory(); - + IndexOutput o = dir.createOutput("legacy", IOContext.DEFAULT); VerifyingIndexOutput out = new LegacyVerification.Adler32VerifyingIndexOutput(o, expectedString, 8); out.writeBytes(bytes, 0, bytes.length); out.verify(); out.close(); out.verify(); - + dir.close(); } - + public void testAdler32Corrupt() throws Exception { Adler32 expected = new Adler32(); byte bytes[] = "abcdefgh".getBytes(StandardCharsets.UTF_8); expected.update(bytes); String expectedString = Store.digestToString(expected.getValue()); - + byte corruptBytes[] = "abcdefch".getBytes(StandardCharsets.UTF_8); Directory dir = newDirectory(); - + IndexOutput o = dir.createOutput("legacy", IOContext.DEFAULT); VerifyingIndexOutput out = new LegacyVerification.Adler32VerifyingIndexOutput(o, expectedString, 8); out.writeBytes(corruptBytes, 0, bytes.length); @@ -73,33 +73,33 @@ public class LegacyVerificationTests extends ESTestCase { // expected exception } out.close(); - + try { out.verify(); fail(); } catch (CorruptIndexException e) { // expected exception } - + dir.close(); } - + public void testLengthOnlyOneByte() throws Exception { Directory dir = newDirectory(); - + IndexOutput o = dir.createOutput("oneByte", IOContext.DEFAULT); VerifyingIndexOutput out = new LegacyVerification.LengthVerifyingIndexOutput(o, 1); out.writeByte((byte) 3); out.verify(); out.close(); out.verify(); - + dir.close(); } - + public void testLengthOnlyCorrupt() throws Exception { Directory dir = newDirectory(); - + IndexOutput o = dir.createOutput("oneByte", IOContext.DEFAULT); VerifyingIndexOutput out = new LegacyVerification.LengthVerifyingIndexOutput(o, 2); out.writeByte((byte) 3); @@ -109,16 +109,16 @@ public class LegacyVerificationTests extends ESTestCase { } catch (CorruptIndexException expected) { // expected exception } - + out.close(); - + try { out.verify(); fail(); } catch (CorruptIndexException expected) { // expected exception } - + dir.close(); } } diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index 1e1e9487668..acaa1cf8b5d 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -24,9 +24,35 @@ import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.SegmentInfoFormat; import org.apache.lucene.codecs.lucene50.Lucene50SegmentInfoFormat; import org.apache.lucene.codecs.lucene54.Lucene54Codec; -import org.apache.lucene.document.*; -import org.apache.lucene.index.*; -import org.apache.lucene.store.*; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.document.StringField; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.IndexFormatTooNewException; +import org.apache.lucene.index.IndexFormatTooOldException; +import org.apache.lucene.index.IndexNotFoundException; +import 
org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; +import org.apache.lucene.index.NoDeletionPolicy; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.Term; +import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.TestUtil; @@ -57,14 +83,29 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.zip.Adler32; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class StoreTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java deleted file mode 100644 index a29cc6cf8d0..00000000000 --- a/core/src/test/java/org/elasticsearch/index/translog/BufferedTranslogTests.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.translog; - -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.test.IndexSettingsModule; - -import java.io.IOException; -import java.nio.file.Path; - -/** - * - */ -public class BufferedTranslogTests extends TranslogTests { - - @Override - protected TranslogConfig getTranslogConfig(Path path) { - Settings build = Settings.settingsBuilder() - .put("index.translog.fs.type", TranslogWriter.Type.BUFFERED.name()) - .put("index.translog.fs.buffer_size", 10 + randomInt(128 * 1024), ByteSizeUnit.BYTES) - .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .build(); - return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, null); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index e35c04dcd6b..1da2b7bf3c8 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.mockfile.FilterFileChannel; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.ByteArrayDataOutput; +import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LuceneTestCase; @@ -35,6 +36,8 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -56,14 +59,29 @@ import java.nio.file.Files; import java.nio.file.InvalidPathException; import java.nio.file.Path; import java.nio.file.StandardOpenOption; -import java.util.*; -import java.util.concurrent.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; /** * @@ -116,12 +134,12 @@ public class TranslogTests extends ESTestCase { return new Translog(getTranslogConfig(path)); } - 
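Aside: the TranslogConfig constructor change that makes the deleted BufferedTranslogTests redundant, sketched before/after. Here indexSettings and bufferSize stand in for the values built inside getTranslogConfig below; buffering is now always on, sized by the explicit argument, and Translog.Durabilty is spelled as in the source:

    // before: durability was a constructor argument and buffering was a settings-driven writer type
    new TranslogConfig(shardId, path, indexSettings, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, null);
    // after: no durability parameter; the write buffer size is passed directly
    new TranslogConfig(shardId, path, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, bufferSize);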
protected TranslogConfig getTranslogConfig(Path path) { + private TranslogConfig getTranslogConfig(Path path) { Settings build = Settings.settingsBuilder() - .put(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, TranslogWriter.Type.SIMPLE.name()) .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) .build(); - return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, null); + ByteSizeValue bufferSize = randomBoolean() ? TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); + return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize); } protected void addToTranslogAndList(Translog translog, ArrayList<Translog.Operation> list, Translog.Operation op) throws IOException { @@ -1242,11 +1260,11 @@ private final CountDownLatch downLatch; private final int opsPerThread; private final int threadId; - private final BlockingQueue<LocationOperation> writtenOperations; + private final Collection<LocationOperation> writtenOperations; private final Throwable[] threadExceptions; private final Translog translog; - public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, BlockingQueue<LocationOperation> writtenOperations, Throwable[] threadExceptions) { + public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, Collection<LocationOperation> writtenOperations, Throwable[] threadExceptions) { this.translog = translog; this.downLatch = downLatch; this.opsPerThread = opsPerThread; @@ -1276,76 +1294,58 @@ throw new ElasticsearchException("not supported op type"); } - Translog.Location loc = translog.add(op); + Translog.Location loc = add(op); writtenOperations.add(new LocationOperation(op, loc)); + afterAdd(); } } catch (Throwable t) { threadExceptions[threadId] = t; } } + + protected Translog.Location add(Translog.Operation op) throws IOException { + return translog.add(op); + } + + protected void afterAdd() throws IOException {} } public void testFailFlush() throws IOException { Path tempDir = createTempDir(); - final AtomicBoolean simulateDiskFull = new AtomicBoolean(); + final AtomicBoolean fail = new AtomicBoolean(); TranslogConfig config = getTranslogConfig(tempDir); - Translog translog = new Translog(config) { - @Override - TranslogWriter.ChannelFactory getChannelFactory() { - final TranslogWriter.ChannelFactory factory = super.getChannelFactory(); - - return new TranslogWriter.ChannelFactory() { - @Override - public FileChannel open(Path file) throws IOException { - FileChannel channel = factory.open(file); - return new FilterFileChannel(channel) { - - @Override - public int write(ByteBuffer src) throws IOException { - if (simulateDiskFull.get()) { - if (src.limit() > 1) { - final int pos = src.position(); - final int limit = src.limit(); - src.limit(limit / 2); - super.write(src); - src.position(pos); - src.limit(limit); - throw new IOException("__FAKE__ no space left on device"); - } - } - return super.write(src); - } - }; - } - }; - } - }; + Translog translog = getFailableTranslog(fail, config); List<Translog.Location> locations = new ArrayList<>(); int opsSynced = 0; - int opsAdded = 0; boolean failed = false; while(failed == false) { try { locations.add(translog.add(new Translog.Index("test", "" + opsSynced, Integer.toString(opsSynced).getBytes(Charset.forName("UTF-8"))))); -
opsAdded++; translog.sync(); opsSynced++; + } catch (MockDirectoryWrapper.FakeIOException ex) { + failed = true; + assertFalse(translog.isOpen()); } catch (IOException ex) { failed = true; assertFalse(translog.isOpen()); assertEquals("__FAKE__ no space left on device", ex.getMessage()); } - simulateDiskFull.set(randomBoolean()); + fail.set(randomBoolean()); } - simulateDiskFull.set(false); + fail.set(false); if (randomBoolean()) { try { locations.add(translog.add(new Translog.Index("test", "" + opsSynced, Integer.toString(opsSynced).getBytes(Charset.forName("UTF-8"))))); fail("we are already closed"); } catch (AlreadyClosedException ex) { assertNotNull(ex.getCause()); - assertEquals(ex.getCause().getMessage(), "__FAKE__ no space left on device"); + if (ex.getCause() instanceof MockDirectoryWrapper.FakeIOException) { + assertNull(ex.getCause().getMessage()); + } else { + assertEquals(ex.getCause().getMessage(), "__FAKE__ no space left on device"); + } } } @@ -1402,4 +1402,215 @@ } } } + + public void testTragicEventCanBeAnyException() throws IOException { + Path tempDir = createTempDir(); + final AtomicBoolean fail = new AtomicBoolean(); + TranslogConfig config = getTranslogConfig(tempDir); + assumeFalse("this won't work if we sync on any op", config.isSyncOnEachOperation()); + Translog translog = getFailableTranslog(fail, config, false, true); + LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer borders regularly + translog.add(new Translog.Index("test", "1", lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))); + fail.set(true); + try { + Translog.Location location = translog.add(new Translog.Index("test", "2", lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))); + if (randomBoolean()) { + translog.ensureSynced(location); + } else { + translog.sync(); + } + //TODO once we have a mock FS that can simulate we can also fail on plain sync + fail("WTF"); + } catch (UnknownException ex) { + // w00t + } catch (TranslogException ex) { + assertTrue(ex.getCause() instanceof UnknownException); + } + assertFalse(translog.isOpen()); + assertTrue(translog.getTragicException() instanceof UnknownException); + } + + public void testFatalIOExceptionsWhileWritingConcurrently() throws IOException, InterruptedException { + Path tempDir = createTempDir(); + final AtomicBoolean fail = new AtomicBoolean(false); + + TranslogConfig config = getTranslogConfig(tempDir); + Translog translog = getFailableTranslog(fail, config); + + final int threadCount = randomIntBetween(1, 5); + Thread[] threads = new Thread[threadCount]; + final Throwable[] threadExceptions = new Throwable[threadCount]; + final CountDownLatch downLatch = new CountDownLatch(1); + final CountDownLatch added = new CountDownLatch(randomIntBetween(10, 100)); + List<LocationOperation> writtenOperations = Collections.synchronizedList(new ArrayList<>()); + for (int i = 0; i < threadCount; i++) { + final int threadId = i; + threads[i] = new TranslogThread(translog, downLatch, 200, threadId, writtenOperations, threadExceptions) { + @Override + protected Translog.Location add(Translog.Operation op) throws IOException { + Translog.Location add = super.add(op); + added.countDown(); + return add; + } + + @Override + protected void afterAdd() throws IOException { + if (randomBoolean()) { + translog.sync(); + } + } + }; + threads[i].setDaemon(true); + threads[i].start(); + } + downLatch.countDown(); + added.await();
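Aside: the contract testTragicEventCanBeAnyException (above) pins down is that the first failed write is recorded as the translog's tragic exception, the translog closes itself, and later writers see the failure as the cause of an AlreadyClosedException. A minimal sketch, with op standing in for any Translog.Operation:

    fail.set(true);
    try {
        translog.add(op);             // first failure: the channel throws and the translog self-closes
    } catch (IOException | RuntimeException expected) {
        assertFalse(translog.isOpen());
    }
    try {
        translog.add(op);             // later writes are rejected outright...
    } catch (AlreadyClosedException ex) {
        assertNotNull(ex.getCause()); // ...carrying the original tragic exception as the cause
    }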
+ try (Translog.View view = translog.newView()) { + // this holds a reference to the current tlog channel such that it's not closed + // if we hit a tragic event. this is important to ensure that asserts inside the Translog#add don't trip + // otherwise our assertions here are off by one sometimes. + fail.set(true); + for (int i = 0; i < threadCount; i++) { + threads[i].join(); + } + boolean atLeastOneFailed = false; + for (Throwable ex : threadExceptions) { + assertTrue(ex.toString(), ex instanceof IOException || ex instanceof AlreadyClosedException); + if (ex != null) { + atLeastOneFailed = true; + } + } + if (atLeastOneFailed == false) { + try { + boolean syncNeeded = translog.syncNeeded(); + translog.close(); + assertFalse("should have failed if sync was needed", syncNeeded); + } catch (IOException ex) { + // boom now we failed + } + } + Collections.sort(writtenOperations, (a, b) -> a.location.compareTo(b.location)); + assertFalse(translog.isOpen()); + final Checkpoint checkpoint = Checkpoint.read(config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME)); + Iterator<LocationOperation> iterator = writtenOperations.iterator(); + while (iterator.hasNext()) { + LocationOperation next = iterator.next(); + if (checkpoint.offset < (next.location.translogLocation + next.location.size)) { + // drop all that haven't been synced + iterator.remove(); + } + } + config.setTranslogGeneration(translog.getGeneration()); + try (Translog tlog = new Translog(config)) { + try (Translog.Snapshot snapshot = tlog.newSnapshot()) { + if (writtenOperations.size() != snapshot.estimatedTotalOperations()) { + for (int i = 0; i < threadCount; i++) { + if (threadExceptions[i] != null) + threadExceptions[i].printStackTrace(); + } + } + assertEquals(writtenOperations.size(), snapshot.estimatedTotalOperations()); + for (int i = 0; i < writtenOperations.size(); i++) { + assertEquals("expected operation " + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, writtenOperations.get(i).location.generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals(next, writtenOperations.get(i).operation); + } + } + } + } + } + private Translog getFailableTranslog(final AtomicBoolean fail, final TranslogConfig config) throws IOException { + return getFailableTranslog(fail, config, randomBoolean(), false); + } + + private Translog getFailableTranslog(final AtomicBoolean fail, final TranslogConfig config, final boolean partialWrites, final boolean throwUnknownException) throws IOException { + return new Translog(config) { + @Override + TranslogWriter.ChannelFactory getChannelFactory() { + final TranslogWriter.ChannelFactory factory = super.getChannelFactory(); + + return new TranslogWriter.ChannelFactory() { + @Override + public FileChannel open(Path file) throws IOException { + FileChannel channel = factory.open(file); + return new ThrowingFileChannel(fail, partialWrites, throwUnknownException, channel); + } + }; + } + }; + } + + public static class ThrowingFileChannel extends FilterFileChannel { + private final AtomicBoolean fail; + private final boolean partialWrite; + private final boolean throwUnknownException; + + public ThrowingFileChannel(AtomicBoolean fail, boolean partialWrite, boolean throwUnknownException, FileChannel delegate) { + super(delegate); + this.fail = fail; + this.partialWrite = partialWrite; + this.throwUnknownException = throwUnknownException; + } + + @Override + public long write(ByteBuffer[] srcs, int offset, int length) throws IOException { +
throw new UnsupportedOperationException(); + } + + @Override + public int write(ByteBuffer src, long position) throws IOException { + throw new UnsupportedOperationException(); + } + + + public int write(ByteBuffer src) throws IOException { + if (fail.get()) { + if (partialWrite) { + if (src.hasRemaining()) { + final int pos = src.position(); + final int limit = src.limit(); + src.limit(randomIntBetween(pos, limit)); + super.write(src); + src.limit(limit); + src.position(pos); + throw new IOException("__FAKE__ no space left on device"); + } + } + if (throwUnknownException) { + throw new UnknownException(); + } else { + throw new MockDirectoryWrapper.FakeIOException(); + } + } + return super.write(src); + } + } + + private static final class UnknownException extends RuntimeException { + + } + + // see https://github.com/elastic/elasticsearch/issues/15754 + public void testFailWhileCreateWriteWithRecoveredTLogs() throws IOException { + Path tempDir = createTempDir(); + TranslogConfig config = getTranslogConfig(tempDir); + Translog translog = new Translog(config); + translog.add(new Translog.Index("test", "boom", "boom".getBytes(Charset.forName("UTF-8")))); + Translog.TranslogGeneration generation = translog.getGeneration(); + translog.close(); + config.setTranslogGeneration(generation); + try { + new Translog(config) { + @Override + protected TranslogWriter createWriter(long fileGeneration) throws IOException { + throw new MockDirectoryWrapper.FakeIOException(); + } + }; + // if we have a LeakFS here we fail if not all resources are closed + fail("should have been failed"); + } catch (MockDirectoryWrapper.FakeIOException ex) { + // all is well + } + } } diff --git a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index 818937c511e..8abe19ffbb6 100644 --- a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -43,9 +43,15 @@ import static org.elasticsearch.client.Requests.clusterHealthRequest; import static org.elasticsearch.client.Requests.createIndexRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.cluster.routing.ShardRoutingState.*; +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; /** diff --git a/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java index 792f14bce1e..488472c26df 100644 --- a/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java @@ -25,13 +25,12 @@ import org.elasticsearch.action.get.GetResponse; 
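Aside, looking back at the translog tests: the checkpoint pruning in testFatalIOExceptionsWhileWritingConcurrently can be read as one rule, an operation is only expected to survive recovery if its last byte lies at or before the checkpointed offset. A condensed equivalent of the iterator loop above, using the same names as in that test:

    Checkpoint checkpoint = Checkpoint.read(config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME));
    // ops whose end offset lies past the checkpoint were never fsynced, so drop them
    writtenOperations.removeIf(op -> checkpoint.offset < op.location.translogLocation + op.location.size);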
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java index 8de3af25827..4f6aaf25705 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java @@ -170,14 +170,14 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase { //add a node: 3 out of the 6 shards will be relocated to it //disable allocation before starting a new node, as we need to register the listener first assertAcked(client().admin().cluster().prepareUpdateSettings() - .setPersistentSettings(builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none"))); + .setPersistentSettings(builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); String node2 = internalCluster().startNode(); IndexShardStateChangeListener stateChangeListenerNode2 = new IndexShardStateChangeListener(); //add a listener that keeps track of the shard state changes internalCluster().getInstance(MockIndexEventListener.TestEventListener.class, node2).setNewDelegate(stateChangeListenerNode2); //re-enable allocation assertAcked(client().admin().cluster().prepareUpdateSettings() - .setPersistentSettings(builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "all"))); + .setPersistentSettings(builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"))); ensureGreen(); //the 3 relocated shards get closed on the first node diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index 968266f00c3..ec182a69890 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -19,14 +19,13 @@ package org.elasticsearch.indices; -import org.apache.lucene.analysis.hunspell.Dictionary; import org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.TermQueryParser; import java.io.IOException; -import java.io.InputStream; -import java.util.Collections; public class IndicesModuleTests extends ModuleTestCase { diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 66cb5e7ea7d..2723f49a77a 100644 --- 
a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -76,7 +76,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1", "test2"), true); verify(stats("test1", "test2"), true); verify(forceMerge("test1", "test2"), true); - verify(refresh("test1", "test2"), true); + verify(refreshBuilder("test1", "test2"), true); verify(validateQuery("test1", "test2"), true); verify(aliasExists("test1", "test2"), true); verify(typesExists("test1", "test2"), true); @@ -97,7 +97,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1", "test2").setIndicesOptions(options), true); verify(stats("test1", "test2").setIndicesOptions(options), true); verify(forceMerge("test1", "test2").setIndicesOptions(options), true); - verify(refresh("test1", "test2").setIndicesOptions(options), true); + verify(refreshBuilder("test1", "test2").setIndicesOptions(options), true); verify(validateQuery("test1", "test2").setIndicesOptions(options), true); verify(aliasExists("test1", "test2").setIndicesOptions(options), true); verify(typesExists("test1", "test2").setIndicesOptions(options), true); @@ -118,7 +118,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1", "test2").setIndicesOptions(options), false); verify(stats("test1", "test2").setIndicesOptions(options), false); verify(forceMerge("test1", "test2").setIndicesOptions(options), false); - verify(refresh("test1", "test2").setIndicesOptions(options), false); + verify(refreshBuilder("test1", "test2").setIndicesOptions(options), false); verify(validateQuery("test1", "test2").setIndicesOptions(options), false); verify(aliasExists("test1", "test2").setIndicesOptions(options), false); verify(typesExists("test1", "test2").setIndicesOptions(options), false); @@ -141,7 +141,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1", "test2").setIndicesOptions(options), false); verify(stats("test1", "test2").setIndicesOptions(options), false); verify(forceMerge("test1", "test2").setIndicesOptions(options), false); - verify(refresh("test1", "test2").setIndicesOptions(options), false); + verify(refreshBuilder("test1", "test2").setIndicesOptions(options), false); verify(validateQuery("test1", "test2").setIndicesOptions(options), false); verify(aliasExists("test1", "test2").setIndicesOptions(options), false); verify(typesExists("test1", "test2").setIndicesOptions(options), false); @@ -172,7 +172,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), true); verify(stats("test1").setIndicesOptions(options), true); verify(forceMerge("test1").setIndicesOptions(options), true); - verify(refresh("test1").setIndicesOptions(options), true); + verify(refreshBuilder("test1").setIndicesOptions(options), true); verify(validateQuery("test1").setIndicesOptions(options), true); verify(aliasExists("test1").setIndicesOptions(options), true); verify(typesExists("test1").setIndicesOptions(options), true); @@ -193,7 +193,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), false); verify(stats("test1").setIndicesOptions(options), false); verify(forceMerge("test1").setIndicesOptions(options), false); - verify(refresh("test1").setIndicesOptions(options), false); + 
verify(refreshBuilder("test1").setIndicesOptions(options), false); verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); @@ -217,7 +217,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), false); verify(stats("test1").setIndicesOptions(options), false); verify(forceMerge("test1").setIndicesOptions(options), false); - verify(refresh("test1").setIndicesOptions(options), false); + verify(refreshBuilder("test1").setIndicesOptions(options), false); verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); @@ -240,7 +240,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), true); verify(stats("test1").setIndicesOptions(options), true); verify(forceMerge("test1").setIndicesOptions(options), true); - verify(refresh("test1").setIndicesOptions(options), true); + verify(refreshBuilder("test1").setIndicesOptions(options), true); verify(validateQuery("test1").setIndicesOptions(options), true); verify(aliasExists("test1").setIndicesOptions(options), true); verify(typesExists("test1").setIndicesOptions(options), true); @@ -260,7 +260,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), false); verify(stats("test1").setIndicesOptions(options), false); verify(forceMerge("test1").setIndicesOptions(options), false); - verify(refresh("test1").setIndicesOptions(options), false); + verify(refreshBuilder("test1").setIndicesOptions(options), false); verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); @@ -283,7 +283,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments("test1").setIndicesOptions(options), false); verify(stats("test1").setIndicesOptions(options), false); verify(forceMerge("test1").setIndicesOptions(options), false); - verify(refresh("test1").setIndicesOptions(options), false); + verify(refreshBuilder("test1").setIndicesOptions(options), false); verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); @@ -336,7 +336,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices), false); verify(stats(indices), false); verify(forceMerge(indices), false); - verify(refresh(indices), false); + verify(refreshBuilder(indices), false); verify(validateQuery(indices), true); verify(aliasExists(indices), false); verify(typesExists(indices), false); @@ -358,7 +358,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices).setIndicesOptions(options), false); verify(stats(indices).setIndicesOptions(options), false); verify(forceMerge(indices).setIndicesOptions(options), false); - verify(refresh(indices).setIndicesOptions(options), false); + verify(refreshBuilder(indices).setIndicesOptions(options), false); verify(validateQuery(indices).setIndicesOptions(options), false); verify(aliasExists(indices).setIndicesOptions(options), false); 
verify(typesExists(indices).setIndicesOptions(options), false); @@ -383,7 +383,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices), false); verify(stats(indices), false); verify(forceMerge(indices), false); - verify(refresh(indices), false); + verify(refreshBuilder(indices), false); verify(validateQuery(indices), false); verify(aliasExists(indices), false); verify(typesExists(indices), false); @@ -405,7 +405,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices), false); verify(stats(indices), false); verify(forceMerge(indices), false); - verify(refresh(indices), false); + verify(refreshBuilder(indices), false); verify(validateQuery(indices), true); verify(aliasExists(indices), false); verify(typesExists(indices), false); @@ -427,7 +427,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(segments(indices).setIndicesOptions(options), false); verify(stats(indices).setIndicesOptions(options), false); verify(forceMerge(indices).setIndicesOptions(options), false); - verify(refresh(indices).setIndicesOptions(options), false); + verify(refreshBuilder(indices).setIndicesOptions(options), false); verify(validateQuery(indices).setIndicesOptions(options), false); verify(aliasExists(indices).setIndicesOptions(options), false); verify(typesExists(indices).setIndicesOptions(options), false); @@ -770,7 +770,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { return client().admin().indices().prepareForceMerge(indices); } - private static RefreshRequestBuilder refresh(String... indices) { + private static RefreshRequestBuilder refreshBuilder(String... indices) { return client().admin().indices().prepareRefresh(indices); } diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java index 7034d5b439f..2b19b01f2c4 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java @@ -20,9 +20,6 @@ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.util.StopwordAnalyzerBase; -import org.apache.lucene.util.Version; - -import java.io.Reader; public class DummyAnalyzer extends StopwordAnalyzerBase { diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 80993229bec..335a9d38fba 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -503,4 +503,19 @@ public class AnalyzeActionIT extends ESIntegTestCase { } + public void testNonExistTokenizer() { + try { + AnalyzeResponse analyzeResponse = client().admin().indices() + .prepareAnalyze("this is a test") + .setAnalyzer("not_exist_analyzer") + .get(); + fail("shouldn't get here"); + } catch (Throwable t) { + assertThat(t, instanceOf(IllegalArgumentException.class)); + assertThat(t.getMessage(), startsWith("failed to find global analyzer")); + + } + + } + } diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java index c25b20699aa..83295e9548a 100644 --- a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java +++ 
b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.indices.cache.query; -import java.io.IOException; - import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -42,6 +40,8 @@ import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; + public class IndicesQueryCacheTests extends ESTestCase { private static class DummyQuery extends Query { diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index aa8c9f18c01..7acc289e209 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.flush; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushResponse; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.ClusterState; @@ -27,7 +28,10 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -99,7 +103,7 @@ public class FlushIT extends ESIntegTestCase { result = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), new ShardId("test", 0)); } else { logger.info("--> sync flushing index [test]"); - IndicesSyncedFlushResult indicesResult = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test"); + SyncedFlushResponse indicesResult = client().admin().indices().prepareSyncedFlush("test").get(); result = indicesResult.getShardsResultPerIndex().get("test").get(0); } assertFalse(result.failed()); @@ -143,14 +147,13 @@ public class FlushIT extends ESIntegTestCase { } } - @TestLogging("indices:TRACE") public void testSyncedFlushWithConcurrentIndexing() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); createIndex("test"); client().admin().indices().prepareUpdateSettings("test").setSettings( - Settings.builder().put("index.translog.disable_flush", true).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1)) + Settings.builder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1)) .get(); ensureGreen(); final AtomicBoolean stop = new AtomicBoolean(false); @@ -171,7 +174,7 @@ public class FlushIT extends ESIntegTestCase { assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); } logger.info("--> trying sync flush"); - IndicesSyncedFlushResult syncedFlushResult = 
SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test"); + SyncedFlushResponse syncedFlushResult = client().admin().indices().prepareSyncedFlush("test").get(); logger.info("--> sync flush done"); stop.set(true); indexingThread.join(); @@ -191,7 +194,7 @@ for (final ShardStats shardStats : shardsStats) { for (final ShardsSyncedFlushResult shardResult : syncedFlushResults) { if (shardStats.getShardRouting().getId() == shardResult.shardId().getId()) { - for (Map.Entry<ShardRouting, SyncedFlushService.SyncedFlushResponse> singleResponse : shardResult.shardResponses().entrySet()) { + for (Map.Entry<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> singleResponse : shardResult.shardResponses().entrySet()) { if (singleResponse.getKey().currentNodeId().equals(shardStats.getShardRouting().currentNodeId())) { if (singleResponse.getValue().success()) { logger.info("{} sync flushed on node {}", singleResponse.getKey().shardId(), singleResponse.getKey().currentNodeId()); @@ -212,7 +215,7 @@ prepareCreate("test").setSettings(Settings.builder().put("index.routing.allocation.include._name", "nonexistent")).get(); // this should not hang but instead immediately return with empty result set - List<ShardsSyncedFlushResult> shardsResult = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test").getShardsResultPerIndex().get("test"); + List<ShardsSyncedFlushResult> shardsResult = client().admin().indices().prepareSyncedFlush("test").get().getShardsResultPerIndex().get("test"); // just to make sure the test actually tests the right thing int numShards = client().admin().indices().prepareGetSettings("test").get().getIndexToSettings().get("test").getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1); assertThat(shardsResult.size(), equalTo(numShards)); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index 1a4bf8fd3f7..e4c9cb8a7ef 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -98,7 +98,7 @@ assertNotNull(syncedFlushResult); assertEquals(1, syncedFlushResult.successfulShards()); assertEquals(1, syncedFlushResult.totalShards()); - SyncedFlushService.SyncedFlushResponse response = syncedFlushResult.shardResponses().values().iterator().next(); + SyncedFlushService.ShardSyncedFlushResponse response = syncedFlushResult.shardResponses().values().iterator().next(); assertTrue(response.success()); } @@ -157,7 +157,7 @@ assertNull(listener.result); assertEquals("no such index", listener.error.getMessage()); } - + public void testFailAfterIntermediateCommit() throws InterruptedException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java index fef6c23231e..485ec020c3f 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java @@ -20,7 +20,6 @@ package org.elasticsearch.indices.flush; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.IndicesOptions; import
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.index.engine.Engine; @@ -38,25 +37,6 @@ public class SyncedFlushUtil { } - /** - * Blocking single index version of {@link SyncedFlushService#attemptSyncedFlush(String[], IndicesOptions, ActionListener)} - */ - public static IndicesSyncedFlushResult attemptSyncedFlush(InternalTestCluster cluster, String index) { - SyncedFlushService service = cluster.getInstance(SyncedFlushService.class); - LatchedListener listener = new LatchedListener(); - service.attemptSyncedFlush(new String[]{index}, IndicesOptions.lenientExpandOpen(), listener); - try { - listener.latch.await(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - if (listener.error != null) { - throw ExceptionsHelper.convertToElastic(listener.error); - } - return listener.result; - } - - /** * Blocking version of {@link SyncedFlushService#attemptSyncedFlush(ShardId, ActionListener)} */ @@ -109,5 +89,4 @@ public class SyncedFlushUtil { } return listener.result; } - } diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index ed4b95c03d8..c6e9796ab60 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -42,10 +42,19 @@ import java.util.Map; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.cluster.metadata.IndexMetaData.*; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; @ClusterScope(randomDynamicTemplates = false) public class UpdateMappingIntegrationIT extends ESIntegTestCase { @@ -140,7 +149,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}").execute().actionGet(); fail("Expected MergeMappingException"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [body] cannot be changed from type [string] to [int]")); + assertThat(e.getMessage(), containsString("mapper [body] of different type, current_type [string], merged_type [integer]")); } } diff --git a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java index 3fcf2467ca8..222a4d09433 100644 
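The single-index helper removed above wrapped the async, listener-based SyncedFlushService call in a CountDownLatch so tests could block on the result; the per-shard variant kept in SyncedFlushUtil follows the same shape. Below is a minimal, self-contained sketch of that latch idiom; ResultListener and the generics are illustrative stand-ins, not Elasticsearch types.

import java.util.concurrent.CountDownLatch;

// Illustrative stand-in for the listener interface the async API accepts.
interface ResultListener<T> {
    void onResponse(T result);
    void onFailure(Throwable t);
}

// Latch-based adapter: hand this to the async call, then block in waitFor().
final class LatchedListener<T> implements ResultListener<T> {
    private final CountDownLatch latch = new CountDownLatch(1);
    private volatile T result;
    private volatile Throwable error;

    @Override
    public void onResponse(T result) {
        this.result = result;
        latch.countDown(); // release the waiting caller
    }

    @Override
    public void onFailure(Throwable t) {
        this.error = t;
        latch.countDown();
    }

    T waitFor() throws InterruptedException {
        latch.await(); // block until either callback has fired
        if (error != null) {
            throw new RuntimeException(error);
        }
        return result;
    }
}

The caller invokes the async API with the listener and then calls waitFor(); the removed helper additionally restored the interrupt flag and converted failures, as its body shows.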
--- a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.indices.memory; -import java.util.*; - import org.apache.lucene.index.DirectoryReader; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.common.settings.Settings; @@ -32,6 +30,14 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java index 3398839b905..b1b56acd8cd 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -41,12 +41,12 @@ public class CircuitBreakerNoopIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, "noop") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop") // This is set low, because if the "noop" is not a noop, it will break - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "10b") - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, "noop") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop") // This is set low, because if the "noop" is not a noop, it will break - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, "10b") + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") .build(); } diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index fcd94d99585..1af04e295dd 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -63,13 +63,13 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { private void reset() { logger.info("--> resetting breaker settings"); Settings resetSettings = settingsBuilder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, - HierarchyCircuitBreakerService.DEFAULT_FIELDDATA_BREAKER_LIMIT) - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, - HierarchyCircuitBreakerService.DEFAULT_FIELDDATA_OVERHEAD_CONSTANT) - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, - 
HierarchyCircuitBreakerService.DEFAULT_REQUEST_BREAKER_LIMIT) - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.0) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null)) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefault(null)) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null)) + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); } @@ -119,8 +119,8 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Update circuit breaker settings Settings settings = settingsBuilder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "100b") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.05) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "100b") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.05) .build(); assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -168,8 +168,8 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Update circuit breaker settings Settings settings = settingsBuilder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "100b") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.05) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "100b") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.05) .build(); assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -213,8 +213,8 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { .getNodes()[0].getBreaker().getStats(CircuitBreaker.REQUEST).getLimit(); Settings resetSettings = settingsBuilder() - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "10b") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.0) + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0) .build(); assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); @@ -234,9 +234,9 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Adjust settings so the parent breaker will fail, but the fielddata breaker doesn't resetSettings = settingsBuilder() - .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, "15b") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "90%") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, 1.0) + .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "15b") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "90%") + 
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0) .build(); client.admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings).execute().actionGet(); @@ -261,7 +261,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Make request breaker limited to a small amount Settings resetSettings = settingsBuilder() - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, "10b") + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") .build(); assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java index 741ea305254..033b3bb75ca 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerUnitTests.java @@ -20,11 +20,11 @@ package org.elasticsearch.indices.memory.breaker; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; @@ -66,7 +66,7 @@ public class CircuitBreakerUnitTests extends ESTestCase { } public void testRegisterCustomBreaker() throws Exception { - CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new NodeSettingsService(Settings.EMPTY)); + CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); String customName = "custom"; BreakerSettings settings = new BreakerSettings(customName, 20, 1.0); service.registerBreaker(settings); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 4cf60289a18..dc61d4bc5fe 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; @@ -145,7 +144,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { assertTrue(client().admin().cluster().prepareUpdateSettings() .setTransientSettings(Settings.builder() // one chunk per sec.. 
- .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, chunkSize, ByteSizeUnit.BYTES) + .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), chunkSize, ByteSizeUnit.BYTES) ) .get().isAcknowledged()); } @@ -156,7 +155,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { } assertTrue(client().admin().cluster().prepareUpdateSettings() .setTransientSettings(Settings.builder() - .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, "20mb") + .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), "20mb") ) .get().isAcknowledged()); } @@ -529,8 +528,8 @@ public class IndexRecoveryIT extends ESIntegTestCase { public void testDisconnectsWhileRecovering() throws Exception { final String indexName = "test"; final Settings nodeSettings = Settings.builder() - .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, "100ms") - .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, "1s") + .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), "100ms") + .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), "1s") .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) // restarted recoveries will delete temp files and write them again .build(); // start a master node diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 8346003287c..a64b8606aea 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.index.Index; @@ -44,7 +45,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; @@ -60,7 +60,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public class RecoverySourceHandlerTests extends ESTestCase { private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build()); private final ShardId shardId = new ShardId(INDEX_SETTINGS.getIndex(), 1); - private final NodeSettingsService service = new NodeSettingsService(Settings.EMPTY); + private final ClusterSettings service = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); public void testSendFiles() throws Throwable { Settings settings = Settings.builder().put("indices.recovery.concurrent_streams", 1). 
@@ -108,7 +108,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { assertEquals(0, recoveryDiff.missing.size()); IndexReader reader = DirectoryReader.open(targetStore.directory()); assertEquals(numDocs, reader.maxDoc()); - IOUtils.close(reader, writer, store, targetStore, recoverySettings); + IOUtils.close(reader, writer, store, targetStore); } public void testHandleCorruptedIndexOnSendSendFiles() throws Throwable { @@ -170,7 +170,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { assertNotNull(ExceptionsHelper.unwrapCorruption(ex)); } assertTrue(failedEngine.get()); - IOUtils.close(store, targetStore, recoverySettings); + IOUtils.close(store, targetStore); } @@ -231,7 +231,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { fail("not expected here"); } assertFalse(failedEngine.get()); - IOUtils.close(store, targetStore, recoverySettings); + IOUtils.close(store, targetStore); } private Store newStore(Path path) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java index 8b23354ebb8..4c1a6420bfd 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java @@ -26,7 +26,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.recovery.RecoveryState.*; +import org.elasticsearch.indices.recovery.RecoveryState.File; +import org.elasticsearch.indices.recovery.RecoveryState.Index; +import org.elasticsearch.indices.recovery.RecoveryState.Stage; +import org.elasticsearch.indices.recovery.RecoveryState.Timer; +import org.elasticsearch.indices.recovery.RecoveryState.Translog; +import org.elasticsearch.indices.recovery.RecoveryState.Type; +import org.elasticsearch.indices.recovery.RecoveryState.VerifyIndex; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -37,7 +43,14 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class RecoveryStateTests extends ESTestCase { abstract class Streamer extends Thread { diff --git a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java index a2a7c5fb493..4886ee0886b 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.indices.settings; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.MapperService; 
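The circuit-breaker reset earlier in this diff now reads each default back out of the Setting object via getDefault(null) instead of a parallel DEFAULT_* constant, so the test cannot drift from the production default. Below is a toy model of that key-plus-default pairing; SettingSketch is a simplified stand-in for org.elasticsearch.common.settings.Setting, and the fielddata key and its 60% default are assumptions here, worth checking against the source.

import java.util.HashMap;
import java.util.Map;

// Simplified stand-in: the key and its default live in one object,
// so callers never restate either of them.
final class SettingSketch<T> {
    private final String key;
    private final T defaultValue;

    SettingSketch(String key, T defaultValue) {
        this.key = key;
        this.defaultValue = defaultValue;
    }

    String getKey() { return key; }

    // the real API derives the default from a Settings argument, hence the parameter
    T getDefault(Object settings) { return defaultValue; }
}

class SettingResetExample {
    static final SettingSketch<String> FIELDDATA_LIMIT =
            new SettingSketch<>("indices.breaker.fielddata.limit", "60%");

    public static void main(String[] args) {
        // same shape as the reset() method above: key and default from one place
        Map<String, String> reset = new HashMap<>();
        reset.put(FIELDDATA_LIMIT.getKey(), FIELDDATA_LIMIT.getDefault(null));
        System.out.println(reset); // prints {indices.breaker.fielddata.limit=60%}
    }
}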
import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -40,7 +41,7 @@ public class GetSettingsBlocksIT extends ESIntegTestCase { .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", -1) .put("index.merge.policy.expunge_deletes_allowed", "30") - .put("index.mapper.dynamic", false))); + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, false))); for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { try { @@ -49,7 +50,7 @@ public class GetSettingsBlocksIT extends ESIntegTestCase { assertThat(response.getIndexToSettings().size(), greaterThanOrEqualTo(1)); assertThat(response.getSetting("test", "index.refresh_interval"), equalTo("-1")); assertThat(response.getSetting("test", "index.merge.policy.expunge_deletes_allowed"), equalTo("30")); - assertThat(response.getSetting("test", "index.mapper.dynamic"), equalTo("false")); + assertThat(response.getSetting("test", MapperService.INDEX_MAPPER_DYNAMIC_SETTING), equalTo("false")); } finally { disableIndexBlock("test", block); } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java index efb94aff31e..53a93fd0bd1 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java @@ -20,8 +20,8 @@ package org.elasticsearch.indices.settings; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/test/java/org/elasticsearch/indices/state/CloseIndexDisableCloseAllIT.java b/core/src/test/java/org/elasticsearch/indices/state/CloseIndexDisableCloseAllIT.java index bde40aa928f..8ec629dbbdc 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/CloseIndexDisableCloseAllIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/CloseIndexDisableCloseAllIT.java @@ -40,7 +40,7 @@ public class CloseIndexDisableCloseAllIT extends ESIntegTestCase { // The cluster scope is test b/c we can't clear cluster settings. 
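The tests below toggle the destructive-operations guard through a transient cluster-settings update; with the typed REQUIRES_NAME_SETTING, the raw settings string no longer appears in test code. A sketch of that toggle, assuming the ESIntegTestCase harness used throughout this diff; the class name and test body are hypothetical.

import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESIntegTestCase;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

public class RequiresNameToggleSketchIT extends ESIntegTestCase {
    public void testToggle() {
        // the Setting object supplies its own key, so no raw settings string here
        Settings on = Settings.builder()
                .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), true)
                .build();
        assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(on));
        // ... wildcard destructive calls such as prepareDelete("_all") should now be rejected ...
    }
}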
public void testCloseAllRequiresName() { Settings clusterSettings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, true) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), true) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(clusterSettings)); createIndex("test1", "test2", "test3"); @@ -91,7 +91,7 @@ public class CloseIndexDisableCloseAllIT extends ESIntegTestCase { createIndex("test_no_close"); healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); assertThat(healthResponse.isTimedOut(), equalTo(false)); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(TransportCloseIndexAction.SETTING_CLUSTER_INDICES_CLOSE_ENABLE, false)).get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING.getKey(), false)).get(); try { client.admin().indices().prepareClose("test_no_close").execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 96611aeca8a..4bf752886c9 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -56,12 +56,19 @@ import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; /** */ @@ -171,7 +178,7 @@ public class RareClusterStateIT extends ESIntegTestCase { ensureGreen("test"); // now that the cluster is stable, remove publishing timeout - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "0"))); + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "0"))); Set<String> nodes = new HashSet<>(Arrays.asList(internalCluster().getNodeNames())); nodes.remove(internalCluster().getMasterName()); @@ -200,7 +207,7 @@ public class RareClusterStateIT extends ESIntegTestCase { // but the change might not be on the node that performed the indexing // operation yet - Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "0ms").build(); + Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "0ms").build(); final List<String> nodeNames = internalCluster().startNodesAsync(2, settings).get(); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); diff --git a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index
a5cfa816455..9522b79ea11 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -20,11 +20,11 @@ package org.elasticsearch.indices.state; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.logging.ESLogger; @@ -33,7 +33,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; -import org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException; import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -97,7 +96,7 @@ public class SimpleIndexStateIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); } - public void testFastCloseAfterCreateDoesNotClose() { + public void testFastCloseAfterCreateContinuesCreateAfterOpen() { logger.info("--> creating test index that cannot be allocated"); client().admin().indices().prepareCreate("test").setSettings(Settings.settingsBuilder() .put("index.routing.allocation.include.tag", "no_such_node").build()).get(); @@ -106,17 +105,14 @@ public class SimpleIndexStateIT extends ESIntegTestCase { assertThat(health.isTimedOut(), equalTo(false)); assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED)); - try { - client().admin().indices().prepareClose("test").get(); - fail("Exception should have been thrown"); - } catch(IndexPrimaryShardNotAllocatedException e) { - // expected - } + client().admin().indices().prepareClose("test").get(); logger.info("--> updating test index settings to allow allocation"); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.settingsBuilder() .put("index.routing.allocation.include.tag", "").build()).get(); + client().admin().indices().prepareOpen("test").get(); + logger.info("--> waiting for green status"); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index a87da6fc046..489371224b2 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.engine.VersionConflictEngineException; @@ -46,6 +47,7 @@ import org.elasticsearch.index.query.QueryBuilders; import 
org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -301,7 +303,7 @@ public class IndexStatsIT extends ESIntegTestCase { //nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get(); stats = client().admin().indices().prepareStats().execute().actionGet(); - assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTimeInMillis(), equalTo(0l)); + assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0l)); } public void testThrottleStats() throws Exception { @@ -315,7 +317,7 @@ public class IndexStatsIT extends ESIntegTestCase { .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "1") .put("index.merge.policy.type", "tiered") - + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC.name()) )); ensureGreen(); long termUpto = 0; @@ -339,7 +341,7 @@ public class IndexStatsIT extends ESIntegTestCase { refresh(); stats = client().admin().indices().prepareStats().execute().actionGet(); //nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get(); - done = stats.getPrimaries().getIndexing().getTotal().getThrottleTimeInMillis() > 0; + done = stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis() > 0; if (System.currentTimeMillis() - start > 300*1000) { //Wait 5 minutes for throttling to kick in fail("index throttling didn't kick in after 5 minutes of intense merging"); } @@ -374,7 +376,7 @@ public class IndexStatsIT extends ESIntegTestCase { assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexCount(), equalTo(3l)); assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexFailedCount(), equalTo(0l)); assertThat(stats.getPrimaries().getIndexing().getTotal().isThrottled(), equalTo(false)); - assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTimeInMillis(), equalTo(0l)); + assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0l)); assertThat(stats.getTotal().getIndexing().getTotal().getIndexCount(), equalTo(totalExpectedWrites)); assertThat(stats.getTotal().getStore(), notNullValue()); assertThat(stats.getTotal().getMerge(), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index fc4dd4f6487..9c9cb01e597 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -20,14 +20,20 @@ package org.elasticsearch.indices.store; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import 
org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; @@ -35,7 +41,6 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -50,7 +55,11 @@ import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.disruption.SingleNodeDisruption; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.nio.file.Files; @@ -303,7 +312,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // disable allocation to control the situation more easily assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none"))); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); logger.debug("--> shutting down two random nodes"); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node1, node2, node3)); @@ -322,7 +331,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { .put(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "_name", "NONE"))); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "all"))); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"))); logger.debug("--> waiting for shards to recover on [{}]", node4); // we have to do this in two steps as we now do async shard fetching before assigning, so the change to the @@ -340,7 +349,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // disable allocation again to control concurrency a bit and allow shard active to kick in before allocation assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none"))); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); logger.debug("--> starting the two old nodes back"); @@ -351,7 +360,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { 
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "all"))); + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"))); logger.debug("--> waiting for the lost shard to be recovered"); @@ -396,7 +405,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // disable relocations when we do this, to make sure the shards are not relocated from node2 // due to rebalancing, and delete its content - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE)).get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)).get(); internalCluster().getInstance(ClusterService.class, nonMasterNode).submitStateUpdateTask("test", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) throws Exception { diff --git a/core/src/test/java/org/elasticsearch/indices/warmer/IndicesWarmerBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/warmer/IndicesWarmerBlocksIT.java index 62bac50b0a1..5ca4a99ac1a 100644 --- a/core/src/test/java/org/elasticsearch/indices/warmer/IndicesWarmerBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/indices/warmer/IndicesWarmerBlocksIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.indices.warmer; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.warmer.IndexWarmersMetaData; diff --git a/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java b/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java index 8470020f823..dbbf3bf7247 100644 --- a/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.indices.warmer; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerResponse; import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse; diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 0d59341f1c9..5d482edafd1 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -22,7 +22,15 @@ package org.elasticsearch.monitor.os; import org.apache.lucene.util.Constants; import org.elasticsearch.test.ESTestCase; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; +import static 
org.hamcrest.Matchers.lessThanOrEqualTo; public class OsProbeTests extends ESTestCase { OsProbe probe = OsProbe.getInstance(); diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 9236a16dcbd..4dda068ddd6 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -35,10 +35,11 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class InternalSettingsPreparerTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java new file mode 100644 index 00000000000..693ba4a2eba --- /dev/null +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.nodesinfo; + +import org.elasticsearch.Build; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.monitor.os.DummyOsInfo; +import org.elasticsearch.monitor.os.OsInfo; +import org.elasticsearch.monitor.process.ProcessInfo; +import org.elasticsearch.plugins.DummyPluginInfo; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.threadpool.ThreadPoolInfo; +import org.elasticsearch.transport.TransportInfo; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.core.IsEqual.equalTo; + +/** + * + */ +public class NodeInfoStreamingTests extends ESTestCase { + + public void testNodeInfoStreaming() throws IOException { + NodeInfo nodeInfo = createNodeInfo(); + Version version = Version.CURRENT; + BytesStreamOutput out = new BytesStreamOutput(); + out.setVersion(version); + nodeInfo.writeTo(out); + out.close(); + StreamInput in = StreamInput.wrap(out.bytes()); + in.setVersion(version); + NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in); + assertExpectedUnchanged(nodeInfo, readNodeInfo); + + } + // checks all properties that are expected to be unchanged. 
Once we start changing them between versions, this method has to be changed as well + private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { + assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString())); + assertThat(nodeInfo.getHostname(), equalTo(readNodeInfo.getHostname())); + assertThat(nodeInfo.getVersion(), equalTo(readNodeInfo.getVersion())); + assertThat(nodeInfo.getServiceAttributes().size(), equalTo(readNodeInfo.getServiceAttributes().size())); + for (Map.Entry<String, String> entry : nodeInfo.getServiceAttributes().entrySet()) { + assertNotNull(readNodeInfo.getServiceAttributes().get(entry.getKey())); + assertThat(readNodeInfo.getServiceAttributes().get(entry.getKey()), equalTo(entry.getValue())); + } + compareJsonOutput(nodeInfo.getHttp(), readNodeInfo.getHttp()); + compareJsonOutput(nodeInfo.getJvm(), readNodeInfo.getJvm()); + compareJsonOutput(nodeInfo.getProcess(), readNodeInfo.getProcess()); + compareJsonOutput(nodeInfo.getSettings(), readNodeInfo.getSettings()); + compareJsonOutput(nodeInfo.getThreadPool(), readNodeInfo.getThreadPool()); + compareJsonOutput(nodeInfo.getTransport(), readNodeInfo.getTransport()); + compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode()); + compareJsonOutput(nodeInfo.getOs(), readNodeInfo.getOs()); + comparePluginsAndModules(nodeInfo, readNodeInfo); + } + + private void comparePluginsAndModules(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { + ToXContent.Params params = ToXContent.EMPTY_PARAMS; + XContentBuilder pluginsAndModules = jsonBuilder(); + pluginsAndModules.startObject(); + nodeInfo.getPlugins().toXContent(pluginsAndModules, params); + pluginsAndModules.endObject(); + XContentBuilder readPluginsAndModules = jsonBuilder(); + readPluginsAndModules.startObject(); + readNodeInfo.getPlugins().toXContent(readPluginsAndModules, params); + readPluginsAndModules.endObject(); + assertThat(pluginsAndModules.string(), equalTo(readPluginsAndModules.string())); + } + + private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOException { + ToXContent.Params params = ToXContent.EMPTY_PARAMS; + XContentBuilder param1Builder = jsonBuilder(); + XContentBuilder param2Builder = jsonBuilder(); + param1.toXContent(param1Builder, params); + param2.toXContent(param2Builder, params); + assertThat(param1Builder.string(), equalTo(param2Builder.string())); + } + + + private NodeInfo createNodeInfo() { + Build build = Build.CURRENT; + DiscoveryNode node = new DiscoveryNode("test_node", DummyTransportAddress.INSTANCE, VersionUtils.randomVersion(random())); + Map<String, String> serviceAttributes = new HashMap<>(); + serviceAttributes.put("test", "attribute"); + Settings settings = Settings.builder().put("test", "setting").build(); + OsInfo osInfo = DummyOsInfo.INSTANCE; + ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean()); + JvmInfo jvm = JvmInfo.jvmInfo(); + List<ThreadPool.Info> threadPoolInfos = new ArrayList<>(); + threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5)); + ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); + Map<String, BoundTransportAddress> profileAddresses = new HashMap<>(); + BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE}, DummyTransportAddress.INSTANCE); + profileAddresses.put("test_address", dummyBoundTransportAddress); + TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses); + HttpInfo httpInfo = new
HttpInfo(dummyBoundTransportAddress, randomLong()); + PluginsAndModules plugins = new PluginsAndModules(); + plugins.addModule(DummyPluginInfo.INSTANCE); + plugins.addPlugin(DummyPluginInfo.INSTANCE); + return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, httpInfo, plugins); + } +} diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java index 6b23bb09f24..93ba861dca0 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java @@ -31,7 +31,9 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import java.util.List; import static org.elasticsearch.client.Requests.nodesInfoRequest; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; /** * diff --git a/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java b/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java index 49d22b87bf8..514b1757e41 100644 --- a/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java @@ -34,7 +34,7 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { // The cluster scope is test b/c we can't clear cluster settings. public void testDestructiveOperations() throws Exception { Settings settings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, true) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), true) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -58,7 +58,7 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { } settings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, false) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), false) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -68,7 +68,7 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { // end delete index: // close index: settings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, true) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), true) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); @@ -100,7 +100,7 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { } settings = Settings.builder() - .put(DestructiveOperations.REQUIRES_NAME, false) + .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), false) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); assertAcked(client().admin().indices().prepareClose("_all").get()); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index f627e0217e6..be1acb1218d 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
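The new NodeInfoStreamingTests above follows a reusable wire round-trip recipe: serialize under a pinned wire version, read back, then compare JSON renderings. Below is a hedged generalization of the write/read half for any Streamable; the helper class is illustrative rather than part of the test framework, and it assumes this era's Streamable#readFrom, which fills in an existing blank instance.

import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Streamable;

// Illustrative helper: write `original` at a pinned wire version, then read it
// back into `blank` so the caller can compare the two instances.
final class StreamRoundTrip {
    static <T extends Streamable> T roundTrip(T original, T blank, Version version) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.setVersion(version);    // pin the wire format under test
        original.writeTo(out);
        out.close();
        StreamInput in = StreamInput.wrap(out.bytes());
        in.setVersion(version);     // the read side must agree on the version
        blank.readFrom(in);
        return blank;
    }
}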
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.action.search.SearchResponse; diff --git a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java index db660695843..6a0485133a2 100644 --- a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java @@ -21,11 +21,11 @@ package org.elasticsearch.percolator; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; import org.elasticsearch.action.percolate.MultiPercolateResponse; import org.elasticsearch.action.percolate.PercolateRequestBuilder; import org.elasticsearch.action.percolate.PercolateResponse; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; diff --git a/core/src/test/java/org/elasticsearch/plugins/PluggableTransportModuleIT.java b/core/src/test/java/org/elasticsearch/plugins/PluggableTransportModuleIT.java deleted file mode 100644 index 7831b7ca994..00000000000 --- a/core/src/test/java/org/elasticsearch/plugins/PluggableTransportModuleIT.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.plugins; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.transport.AssertingLocalTransport; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportModule; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.transport.TransportRequestOptions; - -import java.io.IOException; -import java.util.Collection; -import java.util.concurrent.atomic.AtomicInteger; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; - -/** - * - */ -@ClusterScope(scope = Scope.SUITE, numDataNodes = 2) -public class PluggableTransportModuleIT extends ESIntegTestCase { - public static final AtomicInteger SENT_REQUEST_COUNTER = new AtomicInteger(0); - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder() - .put(super.nodeSettings(nodeOrdinal)) - .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "local") - .build(); - } - - @Override - protected Collection<Class<? extends Plugin>> nodePlugins() { - return pluginList(CountingSentRequestsPlugin.class); - } - - @Override - protected Collection<Class<? extends Plugin>> transportClientPlugins() { - return pluginList(CountingSentRequestsPlugin.class); - } - - public void testThatPluginFunctionalityIsLoadedWithoutConfiguration() throws Exception { - for (Transport transport : internalCluster().getInstances(Transport.class)) { - assertThat(transport, instanceOf(CountingAssertingLocalTransport.class)); - } - - int countBeforeRequest = SENT_REQUEST_COUNTER.get(); - internalCluster().clientNodeClient().admin().cluster().prepareHealth().get(); - int countAfterRequest = SENT_REQUEST_COUNTER.get(); - assertThat("Expected send request counter to be greather than zero", countAfterRequest, is(greaterThan(countBeforeRequest))); - } - - public static class CountingSentRequestsPlugin extends Plugin { - @Override - public String name() { - return "counting-pipelines-plugin"; - } - - @Override - public String description() { - return "counting-pipelines-plugin"; - } - - public void onModule(TransportModule transportModule) { - transportModule.setTransport(CountingAssertingLocalTransport.class, this.name()); - } - } - - public static final class CountingAssertingLocalTransport extends AssertingLocalTransport { - - @Inject - public CountingAssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) { - super(settings, threadPool, version, namedWriteableRegistry); - } - - @Override - public void sendRequest(final DiscoveryNode node, final long requestId, final String action, final TransportRequest request, TransportRequestOptions options) throws IOException, TransportException { - SENT_REQUEST_COUNTER.incrementAndGet(); - super.sendRequest(node, requestId, action, request, options); - } - } -} diff --git
a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index 000365f6a20..deaff46f27b 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -23,12 +23,9 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; -import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.List; -import java.util.Properties; import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java index b9282cf05ad..a16f318140f 100644 --- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java +++ b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java @@ -19,8 +19,8 @@ package org.elasticsearch.plugins.responseheader; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestModule; public class TestResponseHeaderPlugin extends Plugin { @@ -34,7 +34,7 @@ public class TestResponseHeaderPlugin extends Plugin { return "test-plugin-custom-header-desc"; } - public void onModule(RestModule restModule) { - restModule.addRestAction(TestResponseHeaderRestAction.class); + public void onModule(NetworkModule module) { + module.registerRestHandler(TestResponseHeaderRestAction.class); } } diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java index 83fa3e21295..4b1645a4ec6 100644 --- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java +++ b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java @@ -21,7 +21,13 @@ package org.elasticsearch.plugins.responseheader; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; public class TestResponseHeaderRestAction extends BaseRestHandler { diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java b/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java index 4bcbb8c8ee7..26c22fc3bb0 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoverySettingsTests.java @@ -32,49 +32,37 @@ public class RecoverySettingsTests extends ESSingleNodeTestCase { } public void testAllSettingsAreDynamicallyUpdatable() { - innerTestSettings(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, randomIntBetween(1, 200), new Validator() { - @Override - public void validate(RecoverySettings recoverySettings, int expectedValue) { - 
assertEquals(expectedValue, recoverySettings.concurrentStreamPool().getMaximumPoolSize()); - } - }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, randomIntBetween(1, 200), new Validator() { - @Override - public void validate(RecoverySettings recoverySettings, int expectedValue) { - assertEquals(expectedValue, recoverySettings.concurrentSmallFileStreamPool().getMaximumPoolSize()); - } - }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, 0, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), 0, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(null, recoverySettings.rateLimiter()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.retryDelayStateSync().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.retryDelayNetwork().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.activityTimeout().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.internalActionTimeout().millis()); } }); - innerTestSettings(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { + innerTestSettings(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getKey(), randomIntBetween(1, 200), TimeUnit.MILLISECONDS, new Validator() { @Override public void validate(RecoverySettings recoverySettings, int expectedValue) { assertEquals(expectedValue, recoverySettings.internalActionLongTimeout().millis()); diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 7095639eafc..438445d538a 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -29,9 +29,9 @@ 
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.math.MathUtils; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.BackgroundIndexer; @@ -44,7 +44,10 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { private final ESLogger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class); @@ -52,7 +55,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -105,7 +108,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -156,7 +159,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, 
numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -227,7 +230,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { final int numReplicas = 0; logger.info("--> creating test index ..."); int allowNodes = 2; - assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.ASYNC))); + assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); final int numDocs = scaledRandomIntBetween(200, 9999); diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 57b5e888ea9..6542a8ab1c6 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.recovery; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.procedures.IntProcedure; - import org.apache.lucene.index.IndexFileNames; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -387,7 +386,7 @@ public class RelocationIT extends ESIntegTestCase { logger.info("--> stopping replica assignment"); assertAcked(client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, "none"))); + .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); logger.info("--> wait for all replica shards to be removed, on all nodes"); assertBusy(new Runnable() { diff --git a/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java index 60a14abac7c..bfaf961ee21 100644 --- a/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.query.QueryBuilders; diff --git a/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java b/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java index e60a120ff18..065b99ea5ae 100644 --- a/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java @@ -27,7 +27,9 @@ import java.util.Locale; import java.util.Map; import java.util.regex.Pattern; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; /** * diff --git 
a/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java index 2740dd73246..db21fef6930 100644 --- a/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java +++ b/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java @@ -51,24 +51,27 @@ public class AliasResolveRoutingIT extends ESIntegTestCase { client().admin().indices().prepareAliases().addAliasAction(newAddAliasAction("test1", "alias0").routing("0")).execute().actionGet(); client().admin().indices().prepareAliases().addAliasAction(newAddAliasAction("test2", "alias0").routing("0")).execute().actionGet(); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "test1"), nullValue()); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "alias"), nullValue()); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "test1"), nullValue()); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "alias"), nullValue()); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "test1"), nullValue()); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "alias10"), equalTo("0")); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "alias20"), equalTo("0")); - assertThat(clusterService().state().metaData().resolveIndexRouting(null, "alias21"), equalTo("1")); - assertThat(clusterService().state().metaData().resolveIndexRouting("3", "test1"), equalTo("3")); - assertThat(clusterService().state().metaData().resolveIndexRouting("0", "alias10"), equalTo("0")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "test1"), nullValue()); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "alias10"), equalTo("0")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "alias20"), equalTo("0")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, null, "alias21"), equalTo("1")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, "3", "test1"), equalTo("3")); + assertThat(clusterService().state().metaData().resolveIndexRouting(null, "0", "alias10"), equalTo("0")); + + // Force the alias routing and ignore the parent. 
+ assertThat(clusterService().state().metaData().resolveIndexRouting("1", null, "alias10"), equalTo("0")); try { - clusterService().state().metaData().resolveIndexRouting("1", "alias10"); + clusterService().state().metaData().resolveIndexRouting(null, "1", "alias10"); fail("should fail"); } catch (IllegalArgumentException e) { // all is well, we can't have two mappings, one provided, and one in the alias } try { - clusterService().state().metaData().resolveIndexRouting(null, "alias0"); + clusterService().state().metaData().resolveIndexRouting(null, null, "alias0"); fail("should fail"); } catch (IllegalArgumentException ex) { // Expected diff --git a/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java index a5b7da7796f..3bbf7146ae4 100644 --- a/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -38,7 +38,9 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; public class SimpleRoutingIT extends ESIntegTestCase { @@ -46,7 +48,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { protected int minimumNumberOfShards() { return 2; } - + public void testSimpleCrudRouting() throws Exception { createIndex("test"); ensureGreen(); @@ -87,7 +89,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); } } - + public void testSimpleSearchRouting() { createIndex("test"); ensureGreen(); @@ -153,7 +155,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareSearch().setSize(0).setRouting("0", "1", "0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l)); } } - + public void testRequiredRoutingMapping() throws Exception { client().admin().indices().prepareCreate("test").addAlias(new Alias("alias")) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject()) @@ -214,76 +216,6 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(false)); } } - - public void testRequiredRoutingWithPathMapping() throws Exception { - client().admin().indices().prepareCreate("test") - .addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_routing").field("required", true).field("path", "routing_field").endObject().startObject("properties") - .startObject("routing_field").field("type", "string").field("index", randomBoolean() ? "no" : "not_analyzed").field("doc_values", randomBoolean() ? 
"yes" : "no").endObject().endObject() - .endObject().endObject()) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID) - .execute().actionGet(); - ensureGreen(); - - logger.info("--> indexing with id [1], and routing [0]"); - client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", "0").setRefresh(true).execute().actionGet(); - - logger.info("--> check failure with different routing"); - try { - client().prepareIndex(indexOrAlias(), "type1", "1").setRouting("1").setSource("field", "value1", "routing_field", "0").setRefresh(true).execute().actionGet(); - fail(); - } catch (ElasticsearchException e) { - assertThat(e.unwrapCause(), instanceOf(MapperParsingException.class)); - } - - - logger.info("--> verifying get with no routing, should fail"); - for (int i = 0; i < 5; i++) { - try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); - fail(); - } catch (RoutingMissingException e) { - assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); - } - } - logger.info("--> verifying get with routing, should find"); - for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); - } - } - - public void testRequiredRoutingWithPathMappingBulk() throws Exception { - client().admin().indices().prepareCreate("test") - .addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_routing").field("required", true).field("path", "routing_field").endObject() - .endObject().endObject()) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID) - .execute().actionGet(); - ensureGreen(); - - logger.info("--> indexing with id [1], and routing [0]"); - client().prepareBulk().add( - client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", "0")).execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); - - logger.info("--> verifying get with no routing, should fail"); - for (int i = 0; i < 5; i++) { - try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); - fail(); - } catch (RoutingMissingException e) { - assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); - } - } - logger.info("--> verifying get with routing, should find"); - for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); - } - } public void testRequiredRoutingBulk() throws Exception { client().admin().indices().prepareCreate("test") @@ -314,38 +246,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); } } - - public void testRequiredRoutingWithPathNumericType() throws Exception { - - client().admin().indices().prepareCreate("test") - .addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_routing").field("required", true).field("path", "routing_field").endObject() - .endObject().endObject()) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID) - 
.execute().actionGet(); - ensureGreen(); - logger.info("--> indexing with id [1], and routing [0]"); - client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", 0).execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); - - logger.info("--> verifying get with no routing, should fail"); - for (int i = 0; i < 5; i++) { - try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); - fail(); - } catch (RoutingMissingException e) { - assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); - } - } - logger.info("--> verifying get with routing, should find"); - for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); - } - } - public void testRequiredRoutingMapping_variousAPIs() throws Exception { client().admin().indices().prepareCreate("test").addAlias(new Alias("alias")) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject()) diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index fc888c79a8c..987aef90bc3 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -54,7 +54,7 @@ public class FileScriptTests extends ESTestCase { .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", false).build(); ScriptService scriptService = makeScriptService(settings); Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); - assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders)); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap())); } public void testAllOpsDisabled() throws Exception { @@ -68,7 +68,7 @@ public class FileScriptTests extends ESTestCase { Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); for (ScriptContext context : ScriptContext.Standard.values()) { try { - scriptService.compile(script, context, contextAndHeaders); + scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap()); fail(context.getKey() + " script should have been rejected"); } catch(Exception e) { assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [file], operation [" + context.getKey() + "] and lang [" + MockScriptEngine.NAME + "] are disabled")); diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index 02fad319846..47adeabe02f 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.watcher.ResourceWatcherService; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -62,7 +63,7 @@ public class NativeScriptTests extends ESTestCase { ScriptService scriptService = injector.getInstance(ScriptService.class); ExecutableScript executable = scriptService.executable(new 
Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), - ScriptContext.Standard.SEARCH, contextAndHeaders); + ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); assertThat(executable.run().toString(), equalTo("test")); terminate(injector.getInstance(ThreadPool.class)); } @@ -88,7 +89,7 @@ public class NativeScriptTests extends ESTestCase { for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext, - contextAndHeaders), notNullValue()); + contextAndHeaders, Collections.emptyMap()), notNullValue()); } } @@ -110,4 +111,4 @@ public class NativeScriptTests extends ESTestCase { return "test"; } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 0edaedbb28e..019eb7c74a0 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -58,7 +58,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), contextAndHeaders); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (ScriptException e) { assertThat(e.getMessage(), containsString("scripts of type [" + scriptType + "], operation [" + PLUGIN_NAME + "_custom_globally_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled")); @@ -71,16 +71,16 @@ public class ScriptContextTests extends ESTestCase { ScriptService scriptService = makeScriptService(); Script script = new Script("1", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, null); try { - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (ScriptException e) { assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [inline], operation [" + PLUGIN_NAME + "_custom_exp_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled")); } // still works for other script contexts - assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, contextAndHeaders)); - assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders)); - assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), contextAndHeaders)); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, contextAndHeaders, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), contextAndHeaders, Collections.emptyMap())); } public void testUnknownPluginScriptContext() throws Exception { @@ -89,7 +89,7 @@ public class ScriptContextTests extends 
ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), contextAndHeaders); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage(), e.getMessage().contains("script context [" + PLUGIN_NAME + "_unknown] not supported")); @@ -109,7 +109,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, context, contextAndHeaders); + scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage(), e.getMessage().contains("script context [test] not supported")); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java index 3e476d2bebb..0f00c2dd58d 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java @@ -33,7 +33,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; -import static java.util.Collections.singleton; import static java.util.Collections.unmodifiableMap; import static java.util.Collections.unmodifiableSet; import static org.elasticsearch.common.util.set.Sets.newHashSet; @@ -252,7 +251,7 @@ public class ScriptModesTests extends ESTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map<String, Object> params) { return null; } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 23cada02c6c..3c939e7e91a 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -33,12 +33,11 @@ import org.junit.Before; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.Set; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -48,7 +47,7 @@ import static org.hamcrest.Matchers.sameInstance; public class ScriptServiceTests extends ESTestCase { private ResourceWatcherService resourceWatcherService; - private Set<ScriptEngineService> scriptEngineServices; + private ScriptEngineService scriptEngineService; private Map<String, ScriptEngineService> scriptEnginesByLangMap; private ScriptContextRegistry scriptContextRegistry; private ScriptContext[] scriptContexts; @@ -72,8 +71,8 @@ public class ScriptServiceTests extends ESTestCase { .put("path.conf", genericConfigFolder) .build(); resourceWatcherService = new ResourceWatcherService(baseSettings, null); - scriptEngineServices = newHashSet(new TestEngineService()); - scriptEnginesByLangMap =
ScriptModesTests.buildScriptEnginesByLangMap(scriptEngineServices); + scriptEngineService = new TestEngineService(); + scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(Collections.singleton(scriptEngineService)); //randomly register custom script contexts int randomInt = randomIntBetween(0, 3); //prevent duplicates using map @@ -100,7 +99,7 @@ public class ScriptServiceTests extends ESTestCase { private void buildScriptService(Settings additionalSettings) throws IOException { Settings finalSettings = Settings.builder().put(baseSettings).put(additionalSettings).build(); Environment environment = new Environment(finalSettings); - scriptService = new ScriptService(finalSettings, environment, scriptEngineServices, resourceWatcherService, scriptContextRegistry) { + scriptService = new ScriptService(finalSettings, environment, Collections.singleton(scriptEngineService), resourceWatcherService, scriptContextRegistry) { @Override String getScriptFromIndex(String scriptLang, String id, HasContextAndHeaders headersContext) { //mock the script that gets retrieved from an index @@ -131,7 +130,7 @@ public class ScriptServiceTests extends ESTestCase { logger.info("--> verify that file with extension was correctly processed"); CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, contextAndHeaders); + ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); logger.info("--> delete both files"); @@ -142,7 +141,7 @@ public class ScriptServiceTests extends ESTestCase { logger.info("--> verify that file with extension was correctly removed"); try { scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH, - contextAndHeaders); + contextAndHeaders, Collections.emptyMap()); fail("the script test_script should no longer exist"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage(), containsString("Unable to find on disk file script [test_script] using lang [test]")); @@ -153,9 +152,9 @@ public class ScriptServiceTests extends ESTestCase { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } @@ -163,9 +162,9 @@ public class ScriptServiceTests extends ESTestCase { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); CompiledScript compiledScript1 = scriptService.compile(new Script("script", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("script", ScriptType.INLINE, "test2", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); 
assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } @@ -174,9 +173,9 @@ buildScriptService(Settings.EMPTY); createFileScripts("test"); CompiledScript compiledScript1 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test2", null), - randomFrom(scriptContexts), contextAndHeaders); + randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } @@ -225,13 +224,11 @@ public class ScriptServiceTests extends ESTestCase { } while (scriptContextSettings.containsKey(scriptContext)); scriptContextSettings.put(scriptContext, randomFrom(ScriptMode.values())); } - int numEngineSettings = randomIntBetween(0, 10); + int numEngineSettings = randomIntBetween(0, ScriptType.values().length * scriptContexts.length); Map<String, ScriptMode> engineSettings = new HashMap<>(); for (int i = 0; i < numEngineSettings; i++) { String settingKey; do { - ScriptEngineService[] scriptEngineServices = this.scriptEngineServices.toArray(new ScriptEngineService[this.scriptEngineServices.size()]); - ScriptEngineService scriptEngineService = randomFrom(scriptEngineServices); ScriptType scriptType = randomFrom(ScriptType.values()); ScriptContext scriptContext = randomFrom(this.scriptContexts); settingKey = scriptEngineService.types()[0] + "." + scriptType + "." + scriptContext.getKey(); @@ -288,40 +285,38 @@ public class ScriptServiceTests extends ESTestCase { buildScriptService(builder.build()); createFileScripts("groovy", "expression", "mustache", "test"); - for (ScriptEngineService scriptEngineService : scriptEngineServices) { - for (ScriptType scriptType : ScriptType.values()) { - //make sure file scripts have a different name than inline ones. - //Otherwise they are always considered file ones as they can be found in the static cache. - String script = scriptType == ScriptType.FILE ? "file_script" : "script"; - for (ScriptContext scriptContext : this.scriptContexts) { - //fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings - ScriptMode scriptMode = engineSettings.get(scriptEngineService.types()[0] + "." + scriptType + "." + scriptContext.getKey()); - if (scriptMode == null) { - scriptMode = scriptContextSettings.get(scriptContext.getKey()); - } - if (scriptMode == null) { - scriptMode = scriptSourceSettings.get(scriptType); - } - if (scriptMode == null) { - scriptMode = DEFAULT_SCRIPT_MODES.get(scriptType); - } + for (ScriptType scriptType : ScriptType.values()) { + //make sure file scripts have a different name than inline ones. + //Otherwise they are always considered file ones as they can be found in the static cache. + String script = scriptType == ScriptType.FILE ? "file_script" : "script"; + for (ScriptContext scriptContext : this.scriptContexts) { + //fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings + ScriptMode scriptMode = engineSettings.get(scriptEngineService.types()[0] + "." + scriptType + "."
+ scriptContext.getKey()); + if (scriptMode == null) { + scriptMode = scriptContextSettings.get(scriptContext.getKey()); + } + if (scriptMode == null) { + scriptMode = scriptSourceSettings.get(scriptType); + } + if (scriptMode == null) { + scriptMode = DEFAULT_SCRIPT_MODES.get(scriptType); + } - for (String lang : scriptEngineService.types()) { - switch (scriptMode) { - case ON: + for (String lang : scriptEngineService.types()) { + switch (scriptMode) { + case ON: + assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders); + break; + case OFF: + assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders); + break; + case SANDBOX: + if (scriptEngineService.sandboxed()) { assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders); - break; - case OFF: + } else { assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders); - break; - case SANDBOX: - if (scriptEngineService.sandboxed()) { - assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders); - } else { - assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders); - } - break; - } + } + break; } } } @@ -338,15 +333,13 @@ public class ScriptServiceTests extends ESTestCase { unknownContext = randomAsciiOfLength(randomIntBetween(1, 30)); } while(scriptContextRegistry.isSupportedContext(new ScriptContext.Plugin(pluginName, unknownContext))); - for (ScriptEngineService scriptEngineService : scriptEngineServices) { - for (String type : scriptEngineService.types()) { - try { - scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin( - pluginName, unknownContext), contextAndHeaders); - fail("script compilation should have been rejected"); - } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported")); - } + for (String type : scriptEngineService.types()) { + try { + scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin( + pluginName, unknownContext), contextAndHeaders, Collections.emptyMap()); + fail("script compilation should have been rejected"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported")); } } } @@ -354,20 +347,20 @@ public class ScriptServiceTests extends ESTestCase { public void testCompileCountedInCompilationStats() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testExecutableCountedInCompilationStats() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); 
} public void testSearchCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); - scriptService.search(null, new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts)); + scriptService.search(null, new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -377,7 +370,7 @@ public class ScriptServiceTests extends ESTestCase { int numberOfCompilations = randomIntBetween(1, 1024); for (int i = 0; i < numberOfCompilations; i++) { scriptService - .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); } assertEquals(numberOfCompilations, scriptService.stats().getCompilations()); } @@ -387,8 +380,8 @@ public class ScriptServiceTests extends ESTestCase { Settings.Builder builder = Settings.builder(); builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1); buildScriptService(builder.build()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -396,14 +389,14 @@ public class ScriptServiceTests extends ESTestCase { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); createFileScripts("test"); - scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testIndexedScriptCountedInCompilationStats() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -412,8 +405,8 @@ public class ScriptServiceTests extends ESTestCase { Settings.Builder builder = Settings.builder(); builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1); buildScriptService(builder.build()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); - scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", 
null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); assertEquals(2L, scriptService.stats().getCompilations()); assertEquals(1L, scriptService.stats().getCacheEvictions()); } @@ -429,7 +422,7 @@ private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext, HasContextAndHeaders contextAndHeaders) { try { - scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders); + scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap()); fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]"); } catch(ScriptException e) { //all good @@ -438,7 +431,7 @@ private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext, HasContextAndHeaders contextAndHeaders) { - assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders), notNullValue()); + assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap()), notNullValue()); } public static class TestEngineService implements ScriptEngineService { @@ -459,7 +452,7 @@ public class ScriptServiceTests extends ESTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map<String, Object> params) { return "compiled_" + script; } diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index fb3de6b0faa..8c18d1dd74b 100644 --- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -20,8 +20,12 @@ package org.elasticsearch.search; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - -import org.apache.lucene.index.*; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 376e8578e2e..15313095650 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.search; import org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.highlight.CustomHighlighter; import org.elasticsearch.search.highlight.Highlighter; import org.elasticsearch.search.highlight.PlainHighlighter; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java index 5154dcc39e1..782ac3225f5 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java +++
b/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations; import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.IntIntMap; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java index 9d83428038d..ee19f14293a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations; import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.IntIntMap; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.bucket.missing.Missing; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java index f2a78295664..63008bc501f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds; import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid; @@ -68,18 +69,24 @@ public class MissingValueIT extends ESIntegTestCase { } public void testStringTerms() { - SearchResponse response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("bar")).get(); - assertSearchResponse(response); - Terms terms = response.getAggregations().get("my_terms"); - assertEquals(2, terms.getBuckets().size()); - assertEquals(1, terms.getBucketByKey("foo").getDocCount()); - assertEquals(1, terms.getBucketByKey("bar").getDocCount()); + for (ExecutionMode mode : ExecutionMode.values()) { + SearchResponse response = client().prepareSearch("idx").addAggregation( + terms("my_terms") + .field("str") + .executionHint(mode.toString()) + .missing("bar")).get(); + assertSearchResponse(response); + Terms terms = response.getAggregations().get("my_terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(1, terms.getBucketByKey("foo").getDocCount()); + assertEquals(1, terms.getBucketByKey("bar").getDocCount()); - response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get(); - assertSearchResponse(response); - terms = response.getAggregations().get("my_terms"); - assertEquals(1, terms.getBuckets().size()); - assertEquals(2, terms.getBucketByKey("foo").getDocCount()); + response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get(); + assertSearchResponse(response); + terms = 
response.getAggregations().get("my_terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(2, terms.getBucketByKey("foo").getDocCount()); + } } public void testLongTerms() { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java index 0a660b85374..aad2c9bb3ed 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java @@ -99,20 +99,22 @@ public class BooleanTermsIT extends ESIntegTestCase { final int bucketCount = numSingleFalses > 0 && numSingleTrues > 0 ? 2 : numSingleFalses + numSingleTrues > 0 ? 1 : 0; assertThat(terms.getBuckets().size(), equalTo(bucketCount)); - Terms.Bucket bucket = terms.getBucketByKey("0"); + Terms.Bucket bucket = terms.getBucketByKey("false"); if (numSingleFalses == 0) { assertNull(bucket); } else { assertNotNull(bucket); assertEquals(numSingleFalses, bucket.getDocCount()); + assertEquals("false", bucket.getKeyAsString()); } - bucket = terms.getBucketByKey("1"); + bucket = terms.getBucketByKey("true"); if (numSingleTrues == 0) { assertNull(bucket); } else { assertNotNull(bucket); assertEquals(numSingleTrues, bucket.getDocCount()); + assertEquals("true", bucket.getKeyAsString()); } } @@ -131,20 +133,22 @@ public class BooleanTermsIT extends ESIntegTestCase { final int bucketCount = numMultiFalses > 0 && numMultiTrues > 0 ? 2 : numMultiFalses + numMultiTrues > 0 ? 1 : 0; assertThat(terms.getBuckets().size(), equalTo(bucketCount)); - Terms.Bucket bucket = terms.getBucketByKey("0"); + Terms.Bucket bucket = terms.getBucketByKey("false"); if (numMultiFalses == 0) { assertNull(bucket); } else { assertNotNull(bucket); assertEquals(numMultiFalses, bucket.getDocCount()); + assertEquals("false", bucket.getKeyAsString()); } - bucket = terms.getBucketByKey("1"); + bucket = terms.getBucketByKey("true"); if (numMultiTrues == 0) { assertNull(bucket); } else { assertNotNull(bucket); assertEquals(numMultiTrues, bucket.getDocCount()); + assertEquals("true", bucket.getKeyAsString()); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java index b6611a956af..e0c7d2352c3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.children.Children; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.sum.Sum; @@ -392,6 +393,65 @@ public class ChildrenIT extends ESIntegTestCase { assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1l)); } + public void testPostCollectAllLeafReaders() throws Exception { + // The 'towns' and 'parent_names' aggs operate on parent docs and if child docs are in different segments we need + // to ensure that the segments containing those child docs are also evaluated in the post collect phase.
+ + // Previously we only evaluated segments that yielded matches in the 'towns' and 'parent_names' aggs, which caused + // us to skip evaluating child docs in segments for which there were no parent matches. + + assertAcked( + prepareCreate("index") + .addMapping("parentType", "name", "type=string,index=not_analyzed", "town", "type=string,index=not_analyzed") + .addMapping("childType", "_parent", "type=parentType", "name", "type=string,index=not_analyzed", "age", "type=integer") + ); + List<IndexRequestBuilder> requests = new ArrayList<>(); + requests.add(client().prepareIndex("index", "parentType", "1").setSource("name", "Bob", "town", "Memphis")); + requests.add(client().prepareIndex("index", "parentType", "2").setSource("name", "Alice", "town", "Chicago")); + requests.add(client().prepareIndex("index", "parentType", "3").setSource("name", "Bill", "town", "Chicago")); + requests.add(client().prepareIndex("index", "childType", "1").setSource("name", "Jill", "age", 5).setParent("1")); + requests.add(client().prepareIndex("index", "childType", "2").setSource("name", "Joey", "age", 3).setParent("1")); + requests.add(client().prepareIndex("index", "childType", "3").setSource("name", "John", "age", 2).setParent("2")); + requests.add(client().prepareIndex("index", "childType", "4").setSource("name", "Betty", "age", 6).setParent("3")); + requests.add(client().prepareIndex("index", "childType", "5").setSource("name", "Dan", "age", 1).setParent("3")); + indexRandom(true, requests); + + SearchResponse response = client().prepareSearch("index") + .setSize(0) + .addAggregation(AggregationBuilders.terms("towns").field("town") + .subAggregation(AggregationBuilders.terms("parent_names").field("name") + .subAggregation(AggregationBuilders.children("child_docs").childType("childType")) + ) + ) + .get(); + + Terms towns = response.getAggregations().get("towns"); + assertThat(towns.getBuckets().size(), equalTo(2)); + assertThat(towns.getBuckets().get(0).getKeyAsString(), equalTo("Chicago")); + assertThat(towns.getBuckets().get(0).getDocCount(), equalTo(2L)); + + Terms parents = towns.getBuckets().get(0).getAggregations().get("parent_names"); + assertThat(parents.getBuckets().size(), equalTo(2)); + assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Alice")); + assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L)); + Children children = parents.getBuckets().get(0).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(1L)); + + assertThat(parents.getBuckets().get(1).getKeyAsString(), equalTo("Bill")); + assertThat(parents.getBuckets().get(1).getDocCount(), equalTo(1L)); + children = parents.getBuckets().get(1).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(2L)); + + assertThat(towns.getBuckets().get(1).getKeyAsString(), equalTo("Memphis")); + assertThat(towns.getBuckets().get(1).getDocCount(), equalTo(1L)); + parents = towns.getBuckets().get(1).getAggregations().get("parent_names"); + assertThat(parents.getBuckets().size(), equalTo(1)); + assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Bob")); + assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L)); + children = parents.getBuckets().get(0).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(2L)); + } + private static final class Control { final String category; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java 
index 9a1d498ad6b..a39e12f00de 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -35,8 +35,8 @@ import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; @@ -1243,7 +1243,7 @@ public class DateHistogramIT extends ESIntegTestCase { for (int i = 0; i < buckets.size(); i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); - assertThat("Bucket " + i + " had wrong key", (DateTime) bucket.getKey(), equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC))); + assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(), equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC))); if (i == 0 || i == 12) { assertThat(bucket.getDocCount(), equalTo(1l)); } else { @@ -1429,7 +1429,7 @@ public class DateHistogramIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -1555,7 +1555,7 @@ public class DateHistogramIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index eed080071bb..4fbbef8a58d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket; import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.ObjectIntMap; import com.carrotsearch.hppc.cursors.ObjectIntCursor; - import org.apache.lucene.util.GeoHashUtils; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index a1f4b20dc1c..349b61fc37e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -423,10 +423,10 @@ public class NestedIT extends ESIntegTestCase { Terms startDate = response.getAggregations().get("startDate"); assertThat(startDate.getBuckets().size(), equalTo(2)); - Terms.Bucket bucket = startDate.getBucketByKey("1414800000000"); // 2014-11-01T00:00:00.000Z + Terms.Bucket bucket = startDate.getBucketByKey("2014-11-01T00:00:00.000Z"); assertThat(bucket.getDocCount(), equalTo(1l)); Terms endDate = bucket.getAggregations().get("endDate"); - bucket = endDate.getBucketByKey("1417305600000"); // 2014-11-30T00:00:00.000Z + bucket = 
endDate.getBucketByKey("2014-11-30T00:00:00.000Z"); assertThat(bucket.getDocCount(), equalTo(1l)); Terms period = bucket.getAggregations().get("period"); bucket = period.getBucketByKey("2014-11"); @@ -440,10 +440,10 @@ public class NestedIT extends ESIntegTestCase { Terms tags = nestedTags.getAggregations().get("tag"); assertThat(tags.getBuckets().size(), equalTo(0)); // and this must be empty - bucket = startDate.getBucketByKey("1417392000000"); // 2014-12-01T00:00:00.000Z + bucket = startDate.getBucketByKey("2014-12-01T00:00:00.000Z"); assertThat(bucket.getDocCount(), equalTo(1l)); endDate = bucket.getAggregations().get("endDate"); - bucket = endDate.getBucketByKey("1419984000000"); // 2014-12-31T00:00:00.000Z + bucket = endDate.getBucketByKey("2014-12-31T00:00:00.000Z"); assertThat(bucket.getDocCount(), equalTo(1l)); period = bucket.getAggregations().get("period"); bucket = period.getBucketByKey("2014-12"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index 0dce2000f1b..607b6902f8c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -22,8 +22,6 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.junit.Ignore; import java.util.ArrayList; import java.util.Arrays; @@ -31,7 +29,6 @@ import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.is; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 1f77ca5bb64..6c1e7dfcabf 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -41,24 +41,38 @@ import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceSco import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.*; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic; +import 
org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods; -import org.junit.Test; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; /** * diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index d20dff0ae05..0fe9113e8f8 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -49,10 +49,19 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java index ac146706eb5..db02d6ccb03 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java @@ -364,7 +364,7 @@ public class AvgIT extends AbstractNumericTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -500,7 +500,7 @@ public class AvgIT extends AbstractNumericTestCase { } @Override - public 
Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -585,4 +585,4 @@ public class AvgIT extends AbstractNumericTestCase { public void scriptRemoved(CompiledScript script) { } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java index 6419e9dcac3..0f94f142133 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java @@ -36,7 +36,13 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.geoBound import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.sameInstance; /** * @@ -232,7 +238,7 @@ public class GeoBoundsIT extends AbstractGeoTestCase { for (int i = 0; i < 10; i++) { Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); - assertThat("Bucket " + bucket.getKey() + " has wrong number of documents", bucket.getDocCount(), equalTo(1l)); + assertThat("InternalBucket " + bucket.getKey() + " has wrong number of documents", bucket.getDocCount(), equalTo(1l)); GeoBounds geoBounds = bucket.getAggregations().get(aggName); assertThat(geoBounds, notNullValue()); assertThat(geoBounds.getName(), equalTo(aggName)); @@ -259,4 +265,4 @@ public class GeoBoundsIT extends AbstractGeoTestCase { assertThat(bottomRight.lat(), closeTo(1.0, GEOHASH_TOLERANCE)); assertThat(bottomRight.lon(), closeTo(0.0, GEOHASH_TOLERANCE)); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index d87de000108..2c27bde57dc 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -359,7 +359,7 @@ public class SumIT extends AbstractNumericTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -497,7 +497,7 @@ public class SumIT extends AbstractNumericTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -583,4 +583,4 @@ public class SumIT extends AbstractNumericTestCase { public void scriptRemoved(CompiledScript script) { } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index fde7256ad01..903c1bab356 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -22,8 +22,14 @@ import 
org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.*; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount; import org.elasticsearch.search.lookup.LeafSearchLookup; @@ -31,7 +37,10 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; -import java.util.*; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -244,7 +253,7 @@ public class ValueCountIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { return script; } @@ -330,4 +339,4 @@ public class ValueCountIT extends ESIntegTestCase { public void scriptRemoved(CompiledScript script) { } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java index 9a00297c57e..e58899807ab 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.cardinality; import com.carrotsearch.hppc.BitMixer; import com.carrotsearch.hppc.IntHashSet; - import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java index b65a86ac57d..d76f88a1a93 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java @@ -185,7 +185,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (i > 0) { assertThat(docCountDeriv, notNullValue()); @@ -224,7 +224,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); Derivative 
docCountDeriv = bucket.getAggregations().get("deriv"); if (i > 0) { assertThat(docCountDeriv, notNullValue()); @@ -267,7 +267,7 @@ public class DerivativeIT extends ESIntegTestCase { // overwritten for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long expectedSum = valueCounts[i] * (i * interval); @@ -312,7 +312,7 @@ public class DerivativeIT extends ESIntegTestCase { // overwritten for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); Stats stats = bucket.getAggregations().get("stats"); assertThat(stats, notNullValue()); long expectedSum = valueCounts[i] * (i * interval); @@ -366,7 +366,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (i > 0) { assertThat(docCountDeriv, notNullValue()); @@ -395,7 +395,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < valueCounts_empty.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (firstDerivValueCounts_empty[i] == null) { assertThat(docCountDeriv, nullValue()); @@ -425,7 +425,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < valueCounts_empty_rnd.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty_rnd[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty_rnd[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (firstDerivValueCounts_empty_rnd[i] == null) { assertThat(docCountDeriv, nullValue()); @@ -454,7 +454,7 @@ public class DerivativeIT extends ESIntegTestCase { for (int i = 0; i < valueCounts_empty.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i + ": ", bucket, i, valueCounts_empty[i]); + checkBucketKeyAndDocCount("InternalBucket " + i + ": ", bucket, i, valueCounts_empty[i]); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); if (firstDerivValueCounts_empty[i] == null) { assertThat(docCountDeriv, nullValue()); @@ -484,7 +484,7 @@ public class DerivativeIT extends ESIntegTestCase { double lastSumValue = Double.NaN; for (int i = 0; i < valueCounts_empty.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); Sum sum = bucket.getAggregations().get("sum"); double thisSumValue = sum.value(); if (bucket.getDocCount() == 0) { @@ -526,7 +526,7 @@ public class DerivativeIT extends ESIntegTestCase { 
double lastSumValue = Double.NaN; for (int i = 0; i < valueCounts_empty.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); Sum sum = bucket.getAggregations().get("sum"); double thisSumValue = sum.value(); if (bucket.getDocCount() == 0) { @@ -565,7 +565,7 @@ public class DerivativeIT extends ESIntegTestCase { double lastSumValue = Double.NaN; for (int i = 0; i < valueCounts_empty_rnd.length; i++) { Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty_rnd[i]); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty_rnd[i]); Sum sum = bucket.getAggregations().get("sum"); double thisSumValue = sum.value(); if (bucket.getDocCount() == 0) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java index 7a359ceb39d..3b4e84f8e7c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.support; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BytesRef; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java index 18e93656562..e6b0981f544 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.support; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.search.Scorer; import org.apache.lucene.util.BytesRef; import org.elasticsearch.script.LeafSearchScript; diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 8d401e5e2e6..28874d2e2a4 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.basic; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -93,4 +93,4 @@ public class SearchWhileCreatingIndexIT extends ESIntegTestCase { cluster().wipeIndices("test"); } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index 
69c4bbdbd11..1f421292371 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -61,7 +61,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { final int numShards = between(1, 20); client().admin().indices().prepareCreate("test") .setSettings(settingsBuilder().put("index.number_of_shards", numShards).put("index.number_of_replicas", numberOfReplicas)) - .addMapping("type1", "loc", "type=geo_point", "test", "type=string").execute().actionGet(); + .addMapping("type", "loc", "type=geo_point", "test", "type=string").execute().actionGet(); ensureGreen(); List indexBuilders = new ArrayList<>(); final int numDocs = between(10, 20); diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index 4586612b007..cbc7f93ff5a 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -22,12 +22,12 @@ package org.elasticsearch.search.basic; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.GeohashCellQuery; diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 4be2b36fbe6..45be05c10d8 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -21,10 +21,10 @@ package org.elasticsearch.search.child; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; @@ -1176,7 +1176,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { .endObject().endObject()).get(); fail(); } catch (IllegalArgumentException e) { - assertThat(e.toString(), containsString("Merge failed with failures {[The _parent field's type option can't be changed: [null]->[parent]")); + assertThat(e.toString(), containsString("The _parent field's type option can't be changed: [null]->[parent]")); } } diff --git 
a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java index f00b72bfa8f..60810ee4df6 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java @@ -81,7 +81,7 @@ public class NestedChildrenFilterTests extends ESTestCase { int checkedParents = 0; final Weight parentsWeight = searcher.createNormalizedWeight(new TermQuery(new Term("type", "parent")), false); for (LeafReaderContext leaf : reader.leaves()) { - DocIdSetIterator parents = parentsWeight.scorer(leaf); + DocIdSetIterator parents = parentsWeight.scorer(leaf).iterator(); for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) { int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue(); hitContext.reset(null, leaf, parentDoc, searcher); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index dfa28947ca2..cea8df63a4b 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -23,7 +23,6 @@ import com.spatial4j.core.context.SpatialContext; import com.spatial4j.core.distance.DistanceUtils; import com.spatial4j.core.exception.InvalidShapeException; import com.spatial4j.core.shape.Shape; - import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.query.SpatialArgs; @@ -79,7 +78,11 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirs import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index 847e03e5c44..1ae211bc242 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -47,7 +47,10 @@ import static org.elasticsearch.test.geo.RandomShapeGenerator.xRandomPoint; import static org.elasticsearch.test.geo.RandomShapeGenerator.xRandomRectangle; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.nullValue; public class GeoShapeQueryTests extends ESSingleNodeTestCase { public void testNullShape() throws Exception { @@ -396,6 +399,12 @@ public class 
GeoShapeQueryTests extends ESSingleNodeTestCase { .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); + // no shape + filter = QueryBuilders.geoShapeQuery("location", ShapeBuilders.newGeometryCollection()); + result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery()) + .setPostFilter(filter).get(); + assertSearchResponse(result); + assertHitCount(result, 0); } public void testPointsOnly() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java index 5a8d7c0150a..05b999a9196 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.search.highlight; -import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; @@ -52,12 +51,12 @@ public class CustomHighlighter implements Highlighter { } List responses = new ArrayList<>(); - responses.add(new StringText(String.format(Locale.ENGLISH, "standard response for %s at position %s", field.field(), + responses.add(new Text(String.format(Locale.ENGLISH, "standard response for %s at position %s", field.field(), cacheEntry.position))); if (field.fieldOptions().options() != null) { for (Map.Entry entry : field.fieldOptions().options().entrySet()) { - responses.add(new StringText("field:" + entry.getKey() + ":" + entry.getValue())); + responses.add(new Text("field:" + entry.getKey() + ":" + entry.getValue())); } } diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 63378baa721..41fe4975e4b 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.highlight; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -54,6 +53,7 @@ import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.boostingQuery; import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; @@ -66,7 +66,6 @@ import static org.elasticsearch.index.query.QueryBuilders.regexpQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.typeQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; -import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; 
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -234,13 +233,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { .field("search_analyzer", "search_autocomplete") .field("term_vector", "with_positions_offsets") .endObject() - .startObject("name") + .endObject() .field("type", "string") .endObject() .endObject() - .field("type", "multi_field") - .endObject() - .endObject() .endObject()) .setSettings(settingsBuilder() .put(indexSettings()) @@ -901,14 +897,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("foo") - .field("type", "multi_field") + .field("type", "string") + .field("termVector", "with_positions_offsets") + .field("store", "yes") + .field("analyzer", "english") .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("termVector", "with_positions_offsets") - .field("store", "yes") - .field("analyzer", "english") - .endObject() .startObject("plain") .field("type", "string") .field("termVector", "with_positions_offsets") @@ -917,14 +910,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { .endObject() .endObject() .startObject("bar") - .field("type", "multi_field") + .field("type", "string") + .field("termVector", "with_positions_offsets") + .field("store", "yes") + .field("analyzer", "english") .startObject("fields") - .startObject("bar") - .field("type", "string") - .field("termVector", "with_positions_offsets") - .field("store", "yes") - .field("analyzer", "english") - .endObject() .startObject("plain") .field("type", "string") .field("termVector", "with_positions_offsets") @@ -1195,8 +1185,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -1223,8 +1213,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -1253,8 
+1243,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperNoVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -1283,8 +1273,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperNoVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -1557,7 +1547,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .fragmenter("simple"))).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); + assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) @@ -1566,7 +1556,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .fragmenter("span"))).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); + assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); assertFailures(client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) @@ -2062,7 +2052,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy quick dog")); + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy quick dog")); } public void testPostingsHighlighterMultipleFields() throws Exception { @@ -2220,8 +2210,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", 
jsonBuilder().startObject().startObject("type1") .startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); @@ -2252,8 +2242,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testPostingsHighlighterMultiMapperFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "multi_field").startObject("fields") - .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "classic").endObject() + .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "classic") + .startObject("fields") .startObject("key").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java b/core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java new file mode 100644 index 00000000000..5156209d6f1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.highlight; + +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.highlight.QueryScorer; +import org.apache.lucene.util.LuceneTestCase; + +public class PlainHighlighterTests extends LuceneTestCase { + + public void testHighlightPhrase() throws Exception { + Query query = new PhraseQuery.Builder() + .add(new Term("field", "foo")) + .add(new Term("field", "bar")) + .build(); + QueryScorer queryScorer = new CustomQueryScorer(query); + org.apache.lucene.search.highlight.Highlighter highlighter = new org.apache.lucene.search.highlight.Highlighter(queryScorer); + String[] frags = highlighter.getBestFragments(new MockAnalyzer(random()), "field", "bar foo bar foo", 10); + assertArrayEquals(new String[] {"bar foo bar foo"}, frags); + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index 65fbbd3340d..daa996be702 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -21,10 +21,10 @@ package org.elasticsearch.search.innerhits; import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.Version; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; diff --git a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java index cc631d5df2a..0525fd28db1 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.internal; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; @@ -39,25 +39,25 @@ public class InternalSearchHitTests extends ESTestCase { SearchShardTarget target = new SearchShardTarget("_node_id", "_index", 0); Map innerHits = new HashMap<>(); - InternalSearchHit innerHit1 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit innerHit1 = new InternalSearchHit(0, "_id", new Text("_type"), null); innerHit1.shardTarget(target); - InternalSearchHit innerInnerHit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit innerInnerHit2 = new InternalSearchHit(0, "_id", new Text("_type"), null); innerInnerHit2.shardTarget(target); innerHits.put("1", new InternalSearchHits(new InternalSearchHit[]{innerInnerHit2}, 1, 1f)); innerHit1.setInnerHits(innerHits); - InternalSearchHit innerHit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit innerHit2 = new 
InternalSearchHit(0, "_id", new Text("_type"), null); innerHit2.shardTarget(target); - InternalSearchHit innerHit3 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit innerHit3 = new InternalSearchHit(0, "_id", new Text("_type"), null); innerHit3.shardTarget(target); innerHits = new HashMap<>(); - InternalSearchHit hit1 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit hit1 = new InternalSearchHit(0, "_id", new Text("_type"), null); innerHits.put("1", new InternalSearchHits(new InternalSearchHit[]{innerHit1, innerHit2}, 1, 1f)); innerHits.put("2", new InternalSearchHits(new InternalSearchHit[]{innerHit3}, 1, 1f)); hit1.shardTarget(target); hit1.setInnerHits(innerHits); - InternalSearchHit hit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null); + InternalSearchHit hit2 = new InternalSearchHit(0, "_id", new Text("_type"), null); hit2.shardTarget(target); InternalSearchHits hits = new InternalSearchHits(new InternalSearchHit[]{hit1, hit2}, 2, 1f); diff --git a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index f281eb3281f..ae163eaf4a6 100644 --- a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -19,12 +19,12 @@ package org.elasticsearch.search.morelikethis; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 39d052400cb..61890092831 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.nested; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.join.ScoreMode; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteResponse; @@ -29,6 +28,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; diff --git a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java index 93e94c49b47..fa9626964e8 100644 --- 
a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java +++ b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java @@ -19,9 +19,9 @@ package org.elasticsearch.search.preference; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; diff --git a/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java b/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java new file mode 100644 index 00000000000..83f6efaa150 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java @@ -0,0 +1,173 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RandomApproximationQuery; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TotalHitCountCollector; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.TestUtil; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +public class ProfileTests extends ESTestCase { + + static Directory dir; + static IndexReader reader; + static ContextIndexSearcher searcher; + + @BeforeClass + public static void before() throws IOException { + dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + final int numDocs = TestUtil.nextInt(random(), 1, 20); + for (int i = 0; i < numDocs; ++i) { + final int numHoles = random().nextInt(5); + for (int j = 0; j < numHoles; ++j) { + w.addDocument(new Document()); + } + Document doc = new Document(); + doc.add(new StringField("foo", "bar", Store.NO)); + w.addDocument(doc); + } + reader = w.getReader(); + w.close(); + 
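// Note: ContextIndexSearcher is constructed from an Engine.Searcher wrapper around the raw IndexSearcher; + // each test below attaches a Profiler to it so the per-query timing breakdown can be asserted on. + // MAYBE_CACHE_POLICY is inherited from Lucene's test framework and randomly decides whether queries are cached. +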
Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + searcher = new ContextIndexSearcher(engineSearcher, IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY); + } + + @AfterClass + public static void after() throws IOException { + IOUtils.close(reader, dir); + dir = null; + reader = null; + searcher = null; + } + + public void testBasic() throws IOException { + Profiler profiler = new Profiler(); + searcher.setProfiler(profiler); + Query query = new TermQuery(new Term("foo", "bar")); + searcher.search(query, 1); + List<ProfileResult> results = profiler.getQueryTree(); + assertEquals(1, results.size()); + Map<String, Long> breakdown = results.get(0).getTimeBreakdown(); + assertThat(breakdown.get(ProfileBreakdown.TimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.ADVANCE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.SCORE.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.MATCH.toString()).longValue(), equalTo(0L)); + + long rewriteTime = profiler.getRewriteTime(); + assertThat(rewriteTime, greaterThan(0L)); + } + + public void testNoScoring() throws IOException { + Profiler profiler = new Profiler(); + searcher.setProfiler(profiler); + Query query = new TermQuery(new Term("foo", "bar")); + searcher.search(query, 1, Sort.INDEXORDER); // scores are not needed + List<ProfileResult> results = profiler.getQueryTree(); + assertEquals(1, results.size()); + Map<String, Long> breakdown = results.get(0).getTimeBreakdown(); + assertThat(breakdown.get(ProfileBreakdown.TimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.ADVANCE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.SCORE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.MATCH.toString()).longValue(), equalTo(0L)); + + long rewriteTime = profiler.getRewriteTime(); + assertThat(rewriteTime, greaterThan(0L)); + } + + public void testUseIndexStats() throws IOException { + Profiler profiler = new Profiler(); + searcher.setProfiler(profiler); + Query query = new TermQuery(new Term("foo", "bar")); + searcher.count(query); // will use index stats + List<ProfileResult> results = profiler.getQueryTree(); + assertEquals(0, results.size()); + + long rewriteTime = profiler.getRewriteTime(); + assertThat(rewriteTime, greaterThan(0L)); + } + + public void testApproximations() throws IOException { + Profiler profiler = new Profiler(); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + // disable query caching since we want to test approximations, which won't + // be exposed on a cached entry + ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, null, MAYBE_CACHE_POLICY); + searcher.setProfiler(profiler); + Query query = new RandomApproximationQuery(new TermQuery(new Term("foo", "bar")), random()); + searcher.count(query); + List<ProfileResult> results = profiler.getQueryTree(); +
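The profiler plumbing exercised by these unit tests is small enough to sketch end to end: a Profiler is attached to the ContextIndexSearcher, the search runs as usual, and afterwards the per-query timing tree and the separately tracked rewrite time are read back. A condensed sketch of that flow, using only calls visible in this test (the index and searcher setup from before() above is assumed):

---------------------------------------------------------------------------
// Sketch only: assumes a ContextIndexSearcher built as in before() above.
Profiler profiler = new Profiler();
searcher.setProfiler(profiler);
searcher.search(new TermQuery(new Term("foo", "bar")), 1);

for (ProfileResult result : profiler.getQueryTree()) {        // one node per profiled query
    Map<String, Long> breakdown = result.getTimeBreakdown();  // keyed by TimingType name
    long scoreTime = breakdown.get(ProfileBreakdown.TimingType.SCORE.toString());
}
long rewriteTime = profiler.getRewriteTime();                 // rewrite is timed separately
---------------------------------------------------------------------------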
assertEquals(1, results.size()); + Map<String, Long> breakdown = results.get(0).getTimeBreakdown(); + assertThat(breakdown.get(ProfileBreakdown.TimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.ADVANCE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.SCORE.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(ProfileBreakdown.TimingType.MATCH.toString()).longValue(), greaterThan(0L)); + + long rewriteTime = profiler.getRewriteTime(); + assertThat(rewriteTime, greaterThan(0L)); + + } + + public void testCollector() throws IOException { + TotalHitCountCollector collector = new TotalHitCountCollector(); + ProfileCollector profileCollector = new ProfileCollector(collector); + assertEquals(0, profileCollector.getTime()); + final LeafCollector leafCollector = profileCollector.getLeafCollector(reader.leaves().get(0)); + assertThat(profileCollector.getTime(), greaterThan(0L)); + long time = profileCollector.getTime(); + leafCollector.setScorer(Lucene.illegalScorer("dummy scorer")); + assertThat(profileCollector.getTime(), greaterThan(time)); + time = profileCollector.getTime(); + leafCollector.collect(0); + assertThat(profileCollector.getTime(), greaterThan(time)); + } +} diff --git a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java new file mode 100644 index 00000000000..f09b18bdb8a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java @@ -0,0 +1,613 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.util.English; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.search.profile.RandomQueryGenerator.randomQueryBuilder; +import static org.elasticsearch.test.hamcrest.DoubleMatcher.nearlyEqual; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.isEmptyOrNullString; +import static org.hamcrest.Matchers.not; + + +public class QueryProfilerIT extends ESIntegTestCase { + + /** + * This test simply checks to make sure nothing crashes. Test indexes 100-150 documents, + * constructs 20-100 random queries and tries to profile them + */ + public void testProfileQuery() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + List<String> stringFields = Arrays.asList("field1"); + List<String> numericFields = Arrays.asList("field2"); + + indexRandom(true, docs); + + refresh(); + int iters = between(20, 100); + for (int i = 0; i < iters; i++) { + QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + for (Map.Entry<String, List<ProfileShardResult>> shard : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shard.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + + } + } + + /** + * This test generates 1-10 random queries and executes a profiled and non-profiled + * search for each query.
It then does some basic sanity checking of score and hits + * to make sure the profiling doesn't interfere with the hits being returned + */ + public void testProfileMatchesRegular() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + List<String> stringFields = Arrays.asList("field1"); + List<String> numericFields = Arrays.asList("field2"); + + indexRandom(true, docs); + + refresh(); + int iters = between(1, 10); + for (int i = 0; i < iters; i++) { + QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); + logger.info(q.toString()); + + + SearchRequestBuilder vanilla = client().prepareSearch("test") + .setQuery(q) + .setProfile(false) + .addSort("_score", SortOrder.DESC) + .addSort("_uid", SortOrder.ASC) + .setPreference("_primary") + .setSearchType(SearchType.QUERY_THEN_FETCH); + + SearchRequestBuilder profile = client().prepareSearch("test") + .setQuery(q) + .setProfile(true) + .addSort("_score", SortOrder.DESC) + .addSort("_uid", SortOrder.ASC) + .setPreference("_primary") + .setSearchType(SearchType.QUERY_THEN_FETCH); + + MultiSearchResponse.Item[] responses = client().prepareMultiSearch() + .add(vanilla) + .add(profile) + .execute().actionGet().getResponses(); + + SearchResponse vanillaResponse = responses[0].getResponse(); + SearchResponse profileResponse = responses[1].getResponse(); + + float vanillaMaxScore = vanillaResponse.getHits().getMaxScore(); + float profileMaxScore = profileResponse.getHits().getMaxScore(); + if (Float.isNaN(vanillaMaxScore)) { + assertTrue("Vanilla maxScore is NaN but Profile is not [" + profileMaxScore + "]", + Float.isNaN(profileMaxScore)); + } else { + assertTrue("Profile maxScore of [" + profileMaxScore + "] is not close to Vanilla maxScore [" + vanillaMaxScore + "]", + nearlyEqual(vanillaMaxScore, profileMaxScore, 0.001)); + } + + assertThat("Profile totalHits of [" + profileResponse.getHits().totalHits() + "] is not close to Vanilla totalHits [" + vanillaResponse.getHits().totalHits() + "]", + vanillaResponse.getHits().getTotalHits(), equalTo(profileResponse.getHits().getTotalHits())); + + SearchHit[] vanillaHits = vanillaResponse.getHits().getHits(); + SearchHit[] profileHits = profileResponse.getHits().getHits(); + + for (int j = 0; j < vanillaHits.length; j++) { + assertThat("Profile hit #" + j + " has a different ID from Vanilla", + vanillaHits[j].getId(), equalTo(profileHits[j].getId())); + } + + } + } + + /** + * This test verifies that the output is reasonable for a simple, non-nested query + */ + public void testSimpleMatch() throws Exception { + createIndex("test"); + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + ensureGreen(); + + QueryBuilder q = QueryBuilders.matchQuery("field1", "one"); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + Map<String, List<ProfileShardResult>> p = resp.getProfileResults(); + assertNotNull(p); + assertThat("Profile response should not be an empty array",
resp.getProfileResults().size(), not(0)); + + for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertEquals(result.getQueryName(), "TermQuery"); + assertEquals(result.getLuceneDescription(), "field1:one"); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + /** + * This test verifies that the output is reasonable for a nested query + */ + public void testBool() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + QueryBuilder q = QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("field1", "one")).must(QueryBuilders.matchQuery("field1", "two")); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + Map<String, List<ProfileShardResult>> p = resp.getProfileResults(); + assertNotNull(p); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + + for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertEquals(result.getQueryName(), "BooleanQuery"); + assertEquals(result.getLuceneDescription(), "+field1:one +field1:two"); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + assertEquals(result.getProfiledChildren().size(), 2); + + // Check the children + List<ProfileResult> children = result.getProfiledChildren(); + assertEquals(children.size(), 2); + + ProfileResult childProfile = children.get(0); + assertEquals(childProfile.getQueryName(), "TermQuery"); + assertEquals(childProfile.getLuceneDescription(), "field1:one"); + assertThat(childProfile.getTime(), greaterThan(0L)); + assertNotNull(childProfile.getTimeBreakdown()); + assertEquals(childProfile.getProfiledChildren().size(), 0); + + childProfile = children.get(1); + assertEquals(childProfile.getQueryName(), "TermQuery"); + assertEquals(childProfile.getLuceneDescription(), "field1:two"); + assertThat(childProfile.getTime(), greaterThan(0L)); + assertNotNull(childProfile.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + + + } + + /** + * Tests a boolean query with no children clauses + */ + public void testEmptyBool() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.boolQuery(); +
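Every integration test in this class consumes the profile response the same way: getProfileResults() returns a map keyed by a shard identifier, each value holding that shard's profiled query trees plus a single collector timing. A minimal reading loop, using only accessors that already appear in these tests:

---------------------------------------------------------------------------
// Sketch only: walks a profiled SearchResponse via the accessors shown in this file.
Map<String, List<ProfileShardResult>> profiles = resp.getProfileResults();
for (Map.Entry<String, List<ProfileShardResult>> shard : profiles.entrySet()) {
    for (ProfileShardResult shardResult : shard.getValue()) {
        for (ProfileResult query : shardResult.getQueryResults()) {
            logger.info("{} [{}] took {}", query.getQueryName(), query.getLuceneDescription(), query.getTime());
        }
        CollectorResult collector = shardResult.getCollectorResult();
        logger.info("collector {} took {}", collector.getName(), collector.getTime());
    }
}
---------------------------------------------------------------------------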
logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + + for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + /** + * Tests a series of three nested boolean queries with a single "leaf" match query. + * The rewrite process will "collapse" this down to a single bool, so this tests to make sure + * nothing catastrophic happens during that fairly substantial rewrite + */ + public void testCollapsingBool() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.boolQuery().must(QueryBuilders.boolQuery().must(QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("field1", "one")))); + + + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + + for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + public void testBoosting() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.boostingQuery(QueryBuilders.matchQuery("field1", "one"), QueryBuilders.matchQuery("field1", "two")) + .boost(randomFloat()) + .negativeBoost(randomFloat()); + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) +
.setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + + for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + public void testDisMaxRange() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.disMaxQuery() + .boost(0.33703882f) + .add(QueryBuilders.rangeQuery("field2").from(null).to(73).includeLower(true).includeUpper(true)); + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + + for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + public void testRange() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.rangeQuery("field2").from(0).to(5); + + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + + for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); +
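The boosting, dis-max, range and phrase tests all repeat this same per-shard block verbatim; if the suite keeps growing it could be hoisted into a shared helper along these lines (assertProfileSanity is a hypothetical name, not something this change adds):

---------------------------------------------------------------------------
// Hypothetical helper mirroring the assertions repeated throughout this class.
private static void assertProfileSanity(SearchResponse resp) {
    assertNotNull("Profile response element should not be null", resp.getProfileResults());
    assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
    for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
        for (ProfileShardResult searchProfiles : shardResult.getValue()) {
            for (ProfileResult result : searchProfiles.getQueryResults()) {
                assertNotNull(result.getQueryName());
                assertNotNull(result.getLuceneDescription());
                assertThat(result.getTime(), greaterThan(0L));
                assertNotNull(result.getTimeBreakdown());
            }
            CollectorResult collector = searchProfiles.getCollectorResult();
            assertThat(collector.getName(), not(isEmptyOrNullString()));
            assertThat(collector.getTime(), greaterThan(0L));
        }
    }
}
---------------------------------------------------------------------------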
assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + public void testPhrase() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i) + " " + English.intToEnglish(i+1), + "field2", i + ); + } + + indexRandom(true, docs); + + refresh(); + + QueryBuilder q = QueryBuilders.matchPhraseQuery("field1", "one two"); + + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch() + .setQuery(q) + .setIndices("test") + .setTypes("type1") + .setProfile(true) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .execute().actionGet(); + + if (resp.getShardFailures().length > 0) { + for (ShardSearchFailure f : resp.getShardFailures()) { + logger.error(f.toString()); + } + fail(); + } + + assertNotNull("Profile response element should not be null", resp.getProfileResults()); + assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); + + for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) { + for (ProfileShardResult searchProfiles : shardResult.getValue()) { + for (ProfileResult result : searchProfiles.getQueryResults()) { + assertNotNull(result.getQueryName()); + assertNotNull(result.getLuceneDescription()); + assertThat(result.getTime(), greaterThan(0L)); + assertNotNull(result.getTimeBreakdown()); + } + + CollectorResult result = searchProfiles.getCollectorResult(); + assertThat(result.getName(), not(isEmptyOrNullString())); + assertThat(result.getTime(), greaterThan(0L)); + } + } + } + + /** + * This test makes sure no profile results are returned when profiling is disabled + */ + public void testNoProfile() throws Exception { + createIndex("test"); + ensureGreen(); + + int numDocs = randomIntBetween(100, 150); + IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource( + "field1", English.intToEnglish(i), + "field2", i + ); + } + + indexRandom(true, docs); + refresh(); + QueryBuilder q = QueryBuilders.rangeQuery("field2").from(0).to(5); + + logger.info(q.toString()); + + SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(false).execute().actionGet(); + assertThat("Profile response element should be an empty map", resp.getProfileResults().size(), equalTo(0)); + } + +} + diff --git a/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java new file mode 100644 index 00000000000..9eb41086bed --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java @@ -0,0 +1,277 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership.
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +import org.apache.lucene.util.English; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.CommonTermsQueryBuilder; +import org.elasticsearch.index.query.DisMaxQueryBuilder; +import org.elasticsearch.index.query.FuzzyQueryBuilder; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; + +import java.util.ArrayList; +import java.util.List; + +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomFloat; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween; +import static org.junit.Assert.assertTrue; + + +public class RandomQueryGenerator { + public static QueryBuilder randomQueryBuilder(List<String> stringFields, List<String> numericFields, int numDocs, int depth) { + assertTrue("Must supply at least one string field", stringFields.size() > 0); + assertTrue("Must supply at least one numeric field", numericFields.size() > 0); + + // If depth is exhausted, or 50% of the time return a terminal + // Helps limit ridiculously large compound queries + if (depth == 0 || randomBoolean()) { + return randomTerminalQuery(stringFields, numericFields, numDocs); + } + + switch (randomIntBetween(0,5)) { + case 0: + return randomTerminalQuery(stringFields, numericFields, numDocs); + case 1: + return QueryBuilders.boolQuery().must(randomQueryBuilder(stringFields, numericFields, numDocs, depth -1)) + .filter(randomQueryBuilder(stringFields, numericFields, numDocs, depth -1)); + case 2: + return randomBoolQuery(stringFields, numericFields, numDocs, depth); + case 3: + // disabled for now because of https://issues.apache.org/jira/browse/LUCENE-6781 + //return randomBoostingQuery(stringFields, numericFields, numDocs, depth); + case 4: + return randomConstantScoreQuery(stringFields, numericFields, numDocs, depth); + case 5: + return randomDisMaxQuery(stringFields, numericFields, numDocs, depth); + default: + return randomTerminalQuery(stringFields, numericFields, numDocs); + } + } + + private static QueryBuilder randomTerminalQuery(List<String> stringFields, List<String> numericFields, int numDocs) { + switch (randomIntBetween(0,6)) { + case 0: + return randomTermQuery(stringFields, numDocs); + case 1: + return randomTermsQuery(stringFields, numDocs); + case 2: + return randomRangeQuery(numericFields, numDocs); + case 3: + return QueryBuilders.matchAllQuery(); + case 4: + return randomCommonTermsQuery(stringFields, numDocs); + case 5: + return randomFuzzyQuery(stringFields); + case 6: + return randomIDsQuery(); +
default: + return randomTermQuery(stringFields, numDocs); + } + } + + private static String randomQueryString(int max) { + StringBuilder qsBuilder = new StringBuilder(); + + for (int i = 0; i < max; i++) { + qsBuilder.append(English.intToEnglish(randomInt(max))); + qsBuilder.append(" "); + } + + return qsBuilder.toString().trim(); + } + + private static String randomField(List<String> fields) { + return fields.get(randomInt(fields.size() - 1)); + } + + + + private static QueryBuilder randomTermQuery(List<String> fields, int numDocs) { + return QueryBuilders.termQuery(randomField(fields), randomQueryString(1)); + } + + private static QueryBuilder randomTermsQuery(List<String> fields, int numDocs) { + int numTerms = randomInt(numDocs); + ArrayList<String> terms = new ArrayList<>(numTerms); + + for (int i = 0; i < numTerms; i++) { + terms.add(randomQueryString(1)); + } + + return QueryBuilders.termsQuery(randomField(fields), terms); + } + + private static QueryBuilder randomRangeQuery(List<String> fields, int numDocs) { + QueryBuilder q = QueryBuilders.rangeQuery(randomField(fields)); + + if (randomBoolean()) { + ((RangeQueryBuilder)q).from(randomIntBetween(0, numDocs / 2 - 1)); + } + if (randomBoolean()) { + ((RangeQueryBuilder)q).to(randomIntBetween(numDocs / 2, numDocs)); + } + + return q; + } + + private static QueryBuilder randomBoolQuery(List<String> stringFields, List<String> numericFields, int numDocs, int depth) { + QueryBuilder q = QueryBuilders.boolQuery(); + int numClause = randomIntBetween(0,5); + for (int i = 0; i < numClause; i++) { + ((BoolQueryBuilder)q).must(randomQueryBuilder(stringFields, numericFields,numDocs, depth -1)); + } + + numClause = randomIntBetween(0,5); + for (int i = 0; i < numClause; i++) { + ((BoolQueryBuilder)q).should(randomQueryBuilder(stringFields, numericFields,numDocs, depth -1)); + } + + numClause = randomIntBetween(0,5); + for (int i = 0; i < numClause; i++) { + ((BoolQueryBuilder)q).mustNot(randomQueryBuilder(stringFields, numericFields, numDocs, depth -1)); + } + + return q; + } + + private static QueryBuilder randomBoostingQuery(List<String> stringFields, List<String> numericFields, int numDocs, int depth) { + return QueryBuilders.boostingQuery( + randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1), + randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1)) + .boost(randomFloat()) + .negativeBoost(randomFloat()); + } + + private static QueryBuilder randomConstantScoreQuery(List<String> stringFields, List<String> numericFields, int numDocs, int depth) { + return QueryBuilders.constantScoreQuery(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1)); + } + + private static QueryBuilder randomCommonTermsQuery(List<String> fields, int numDocs) { + int numTerms = randomInt(numDocs); + + QueryBuilder q = QueryBuilders.commonTermsQuery(randomField(fields), randomQueryString(numTerms)); + if (randomBoolean()) { + ((CommonTermsQueryBuilder)q).boost(randomFloat()); + } + + if (randomBoolean()) { + ((CommonTermsQueryBuilder)q).cutoffFrequency(randomFloat()); + } + + if (randomBoolean()) { + ((CommonTermsQueryBuilder)q).highFreqMinimumShouldMatch(Integer.toString(randomInt(numTerms))) + .highFreqOperator(randomBoolean() ? Operator.AND : Operator.OR); + } + + if (randomBoolean()) { + ((CommonTermsQueryBuilder)q).lowFreqMinimumShouldMatch(Integer.toString(randomInt(numTerms))) + .lowFreqOperator(randomBoolean() ?
Operator.AND : Operator.OR); + } + + return q; + } + + private static QueryBuilder randomFuzzyQuery(List<String> fields) { + + QueryBuilder q = QueryBuilders.fuzzyQuery(randomField(fields), randomQueryString(1)); + + if (randomBoolean()) { + ((FuzzyQueryBuilder)q).boost(randomFloat()); + } + + if (randomBoolean()) { + switch (randomIntBetween(0, 4)) { + case 0: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.AUTO); + break; + case 1: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.ONE); + break; + case 2: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.TWO); + break; + case 3: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.ZERO); + break; + case 4: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.fromEdits(randomIntBetween(0,2))); + break; + default: + ((FuzzyQueryBuilder)q).fuzziness(Fuzziness.AUTO); + break; + } + } + + if (randomBoolean()) { + ((FuzzyQueryBuilder)q).maxExpansions(Math.abs(randomInt())); + } + + if (randomBoolean()) { + ((FuzzyQueryBuilder)q).prefixLength(Math.abs(randomInt())); + } + + if (randomBoolean()) { + ((FuzzyQueryBuilder)q).transpositions(randomBoolean()); + } + + return q; + } + + private static QueryBuilder randomDisMaxQuery(List<String> stringFields, List<String> numericFields, int numDocs, int depth) { + QueryBuilder q = QueryBuilders.disMaxQuery(); + + int numClauses = randomIntBetween(1, 10); + for (int i = 0; i < numClauses; i++) { + ((DisMaxQueryBuilder)q).add(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1)); + } + + if (randomBoolean()) { + ((DisMaxQueryBuilder)q).boost(randomFloat()); + } + + if (randomBoolean()) { + ((DisMaxQueryBuilder)q).tieBreaker(randomFloat()); + } + + return q; + } + + private static QueryBuilder randomIDsQuery() { + QueryBuilder q = QueryBuilders.idsQuery(); + + int numIDs = randomInt(100); + for (int i = 0; i < numIDs; i++) { + ((IdsQueryBuilder)q).addIds(String.valueOf(randomInt())); + } + + if (randomBoolean()) { + ((IdsQueryBuilder)q).boost(randomFloat()); + } + + return q; + } +} diff --git a/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java b/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java index 73906b2ed83..a508883e53a 100644 --- a/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java @@ -40,7 +40,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; public class ExistsIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 235438cc442..a789497235e 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 9918d449657..ad9ab044ad3 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -476,39 +476,6 @@ public class SearchQueryIT extends ESIntegTestCase { assertThirdHit(searchResponse, hasId("2")); } - public void testOmitTermFreqsAndPositions() throws Exception { - cluster().wipeTemplates(); // no randomized template for this test -- we are testing bwc compat and set version explicitly this might cause failures if an unsupported feature - // is added randomly via an index template. - Version version = Version.CURRENT; - int iters = scaledRandomIntBetween(10, 20); - for (int i = 0; i < iters; i++) { - try { - // backwards compat test! - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=string,omit_term_freq_and_positions=true") - .setSettings(settings(version).put(SETTING_NUMBER_OF_SHARDS, 1))); - assertThat(version.onOrAfter(Version.V_1_0_0_RC2), equalTo(false)); - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), - client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox")); - - - SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get(); - assertHitCount(searchResponse, 1l); - try { - client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)).get(); - fail("SearchPhaseExecutionException should have been thrown"); - } catch (SearchPhaseExecutionException e) { - assertTrue(e.toString().contains("IllegalStateException[field \"field1\" was indexed without position data; cannot run PhraseQuery")); - } - cluster().wipeIndices("test"); - } catch (MapperParsingException ex) { - assertThat(version.toString(), version.onOrAfter(Version.V_1_0_0_RC2), equalTo(true)); - assertThat(ex.getCause().getMessage(), equalTo("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs'] instead")); - } - version = randomVersion(random()); - } - } - public void testQueryStringAnalyzedWildcard() throws Exception { createIndex("test"); @@ -635,24 +602,8 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(searchResponse, 0l); } - public void testTypeFilterTypeIndexedTests() throws Exception { - typeFilterTests("not_analyzed"); - } - - public void testTypeFilterTypeNotIndexedTests() throws Exception { - typeFilterTests("no"); - } - - private void typeFilterTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - assertAcked(prepareCreate("test").setSettings(indexSettings) - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_type").field("index", index).endObject() - .endObject().endObject()) - .addMapping("type2", jsonBuilder().startObject().startObject("type2") - .startObject("_type").field("index", index).endObject() - .endObject().endObject()) - .setUpdateAllTypes(true)); + public void testTypeFilter() throws Exception { + assertAcked(prepareCreate("test")); indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), client().prepareIndex("test", "type2", "1").setSource("field1", "value1"), 
client().prepareIndex("test", "type1", "2").setSource("field1", "value1"), @@ -669,19 +620,7 @@ public class SearchQueryIT extends ESIntegTestCase { } public void testIdsQueryTestsIdIndexed() throws Exception { - idsQueryTests("not_analyzed"); - } - - public void testIdsQueryTestsIdNotIndexed() throws Exception { - idsQueryTests("no"); - } - - private void idsQueryTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - assertAcked(client().admin().indices().prepareCreate("test").setSettings(indexSettings) - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_id").field("index", index).endObject() - .endObject().endObject())); + assertAcked(client().admin().indices().prepareCreate("test")); indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), @@ -714,27 +653,13 @@ public class SearchQueryIT extends ESIntegTestCase { assertSearchHits(searchResponse, "1", "3"); } - public void testTermIndexQueryIndexed() throws Exception { - termIndexQueryTests("not_analyzed"); - } - - public void testTermIndexQueryNotIndexed() throws Exception { - termIndexQueryTests("no"); - } - - private void termIndexQueryTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); + public void testTermIndexQuery() throws Exception { String[] indexNames = { "test1", "test2" }; for (String indexName : indexNames) { assertAcked(client() .admin() .indices() - .prepareCreate(indexName) - .setSettings(indexSettings) - .addMapping( - "type1", - jsonBuilder().startObject().startObject("type1").startObject("_index").field("index", index).endObject() - .endObject().endObject())); + .prepareCreate(indexName)); indexRandom(true, client().prepareIndex(indexName, "type1", indexName + "1").setSource("field1", "value1")); @@ -1016,6 +941,59 @@ public class SearchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("1")); + // Min should match > # optional clauses returns no docs. 
+ multiMatchQuery = multiMatchQuery("value1 value2 value3", "field1", "field2"); + multiMatchQuery.minimumShouldMatch("4"); + searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); + assertHitCount(searchResponse, 0l); + } + + public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws IOException { + createIndex("test"); + client().prepareIndex("test", "type1", "1").setSource("field1", new String[]{"value1", "value2", "value3"}).get(); + client().prepareIndex("test", "type1", "2").setSource("field2", "value1").get(); + refresh(); + + BoolQueryBuilder boolQuery = boolQuery() + .must(termQuery("field1", "value1")) + .should(boolQuery() + .should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .minimumNumberShouldMatch(3)); + SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + assertHitCount(searchResponse, 1l); + assertFirstHit(searchResponse, hasId("1")); + + boolQuery = boolQuery() + .must(termQuery("field1", "value1")) + .should(boolQuery() + .should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .minimumNumberShouldMatch(1)) + // Only one should clause is defined, returns no docs. + .minimumNumberShouldMatch(2); + searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + assertHitCount(searchResponse, 0l); + + boolQuery = boolQuery() + .should(termQuery("field1", "value1")) + .should(boolQuery() + .should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .minimumNumberShouldMatch(3)) + .minimumNumberShouldMatch(1); + searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + assertHitCount(searchResponse, 1l); + assertFirstHit(searchResponse, hasId("1")); + + boolQuery = boolQuery() + .must(termQuery("field1", "value1")) + .must(boolQuery() + .should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .minimumNumberShouldMatch(3)); + searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + assertHitCount(searchResponse, 0l); } public void testFuzzyQueryString() { diff --git a/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java index 720d51508f7..ac723778abb 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.scroll; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 0f5ac1a522f..fac7f71446a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -24,8 +24,8 @@ import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -47,11 +47,18 @@ import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.FuzzyOptionsBuilder; -import org.elasticsearch.search.suggest.completion.context.*; +import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; +import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; @@ -62,7 +69,15 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; @SuppressCodecs("*") // requires custom completion format public class CompletionSuggestSearchIT extends ESIntegTestCase { @@ -586,49 +601,6 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { assertSuggestions("r", "Foo Fighters"); } - public void testThatUpgradeToMultiFieldTypeWorks() throws Exception { - final XContentBuilder mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "string") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); - client().prepareIndex(INDEX, TYPE, "1").setRefresh(true).setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "multi_field") - .startObject("fields") - .startObject(FIELD).field("type", "string").endObject() - .startObject("suggest").field("type", "completion").field("analyzer", "simple").endObject() - .endObject() - .endObject() - .endObject().endObject() - 
.endObject()) - .get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - new CompletionSuggestionBuilder("suggs").field(FIELD + ".suggest").text("f").size(10) - ).execute().actionGet(); - assertSuggestions(suggestResponse, "suggs"); - - client().prepareIndex(INDEX, TYPE, "1").setRefresh(true).setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - SuggestResponse afterReindexingResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("suggs").field(FIELD + ".suggest").text("f").size(10) - ).execute().actionGet(); - assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); - } - public void testThatUpgradeToMultiFieldsWorks() throws Exception { final XContentBuilder mapping = jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index ae6ec51ac36..18d6d9b99f9 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -19,9 +19,8 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.GeoHashUtils; +import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.common.geo.GeoPoint; @@ -30,11 +29,23 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.suggest.CompletionSuggestSearchIT.CompletionMappingBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; -import org.elasticsearch.search.suggest.completion.context.*; +import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; +import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; +import org.elasticsearch.search.suggest.completion.context.ContextBuilder; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; +import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; +import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java index 281cf6ae18e..419316b5265 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java @@ -20,7 +20,7 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.search.IndexSearcher; 
import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import java.io.IOException; import java.util.Locale; @@ -42,11 +42,11 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestionsContext> { Suggest.Suggestion<Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option>> response = new Suggest.Suggestion<>(name, suggestion.getSize()); String firstSuggestion = String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "12"); - Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> resultEntry12 = new Suggest.Suggestion.Entry<>(new StringText(firstSuggestion), 0, text.length() + 2); + Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> resultEntry12 = new Suggest.Suggestion.Entry<>(new Text(firstSuggestion), 0, text.length() + 2); response.addTerm(resultEntry12); String secondSuggestion = String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "123"); - Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> resultEntry123 = new Suggest.Suggestion.Entry<>(new StringText(secondSuggestion), 0, text.length() + 3); + Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> resultEntry123 = new Suggest.Suggestion.Entry<>(new Text(secondSuggestion), 0, text.length() + 3); response.addTerm(resultEntry123); return response; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 27a6529bdd8..0d27ba04a91 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -23,19 +23,26 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.suggest.document.ContextSuggestField; -import org.elasticsearch.common.xcontent.*; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; -import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.ContextBuilder; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.*; +import java.util.List; +import java.util.Set; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.isIn; public class CategoryContextMappingTests extends ESSingleNodeTestCase { @@ -52,7 +59,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper =
createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() @@ -73,7 +80,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject() .bytes()); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 7); } @@ -90,7 +97,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() @@ -106,7 +113,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject() .bytes()); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } @@ -123,7 +130,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() @@ -137,7 +144,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject() .endObject() .bytes()); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } @@ -158,7 +165,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); XContentBuilder builder = jsonBuilder() @@ -175,7 +182,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", builder.bytes()); - IndexableField[] fields = 
parsedDocument.rootDoc().getFields(completionFieldType.names().indexName()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 60974baee84..b42af82433b 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.GeoHashUtils; -import org.elasticsearch.common.inject.matcher.Matchers; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -30,10 +30,14 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.search.suggest.completion.context.*; +import org.elasticsearch.search.suggest.completion.context.ContextBuilder; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; +import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.suggest.completion.CategoryContextMappingTests.assertContextSuggestFields; @@ -55,7 +59,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() @@ -76,7 +80,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject() .bytes()); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 7); } @@ -94,7 +98,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", 
jsonBuilder() @@ -113,7 +117,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject() .bytes()); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } @@ -130,7 +134,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() @@ -152,7 +156,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .field("weight", 5) .endObject() .bytes()); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } @@ -173,7 +177,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); XContentBuilder builder = jsonBuilder() @@ -190,7 +194,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject(); ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", builder.bytes()); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } diff --git a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java index ab6a10f3cd2..8912956489f 100644 --- a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java @@ -28,7 +28,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public class SimilarityIT extends ESIntegTestCase { +public class SimilarityIT extends ESIntegTestCase { public void testCustomBM25Similarity() throws Exception { try { client().admin().indices().prepareDelete("test").execute().actionGet(); @@ -45,7 +45,7 @@ public class SimilarityIT extends ESIntegTestCase { .field("type", "string") .endObject() .startObject("field2") - .field("similarity", "default") + .field("similarity", "classic") .field("type", "string") .endObject() .endObject() diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java 
b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 51ae038ca0d..1ef3fddcf8e 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -20,7 +20,12 @@ package org.elasticsearch.snapshots; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.service.PendingClusterTask; @@ -56,7 +61,7 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase { return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) // Rebalancing is causing some checks after restore to randomly fail // due to https://github.com/elastic/elasticsearch/issues/9421 - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE) + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) .build(); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index f9392836d8b..dcea25617b2 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -21,8 +21,6 @@ package org.elasticsearch.snapshots; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; - -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -33,24 +31,17 @@ import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.metadata.MetaData.Custom; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.TimeValue; -import 
org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.index.store.IndexStore; @@ -68,9 +59,9 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.TestCustomMetaData; import org.elasticsearch.test.rest.FakeRestRequest; -import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; @@ -123,14 +114,14 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> set test persistent setting"); client.admin().cluster().prepareUpdateSettings().setPersistentSettings( Settings.settingsBuilder() - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2) - .put(IndicesTTLService.INDICES_TTL_INTERVAL, random, TimeUnit.MINUTES)) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) + .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), random, TimeUnit.MINUTES)) .execute().actionGet(); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis())); + .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis())); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1), equalTo(2)); + .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1), equalTo(2)); logger.info("--> create repository"); PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") @@ -146,23 +137,26 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> clean the test persistent setting"); client.admin().cluster().prepareUpdateSettings().setPersistentSettings( Settings.settingsBuilder() - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 1) - .put(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1))) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1) + .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1))) .execute().actionGet(); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(1).millis())); + .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(1).millis())); stopNode(secondNode); 
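(The mechanical change running through this file deserves one callout: persistent cluster settings are no longer addressed by bare string constants but through typed Setting objects, with each call site asking the Setting for its key. A condensed sketch of the new pattern, assembled from the calls in the surrounding test; nothing here is beyond what the diff itself uses:)

    // Settings keys now come from Setting objects rather than string constants.
    Settings persistent = Settings.settingsBuilder()
            // was: ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES
            .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)
            // was: IndicesTTLService.INDICES_TTL_INTERVAL
            .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), random, TimeUnit.MINUTES)
            .build();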
assertThat(client.admin().cluster().prepareHealth().setWaitForNodes("1").get().isTimedOut(), equalTo(false)); logger.info("--> restore snapshot"); - client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setRestoreGlobalState(true).setWaitForCompletion(true).execute().actionGet(); - assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis())); - + try { + client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setRestoreGlobalState(true).setWaitForCompletion(true).execute().actionGet(); + fail("can't restore minimum master nodes"); + } catch (IllegalArgumentException ex) { + assertEquals("illegal value can't update [discovery.zen.minimum_master_nodes] from [1] to [2]", ex.getMessage()); + assertEquals("cannot set discovery.zen.minimum_master_nodes to more than the current master nodes count [1]", ex.getCause().getMessage()); + } logger.info("--> ensure that zen discovery minimum master nodes wasn't restored"); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1), not(equalTo(2))); + .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1), not(equalTo(2))); } public void testRestoreCustomMetadata() throws Exception { @@ -438,7 +432,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest index("test-idx-all", "doc", Integer.toString(i), "foo", "bar" + i); index("test-idx-closed", "doc", Integer.toString(i), "foo", "bar" + i); } - refresh(); + refresh("test-idx-closed", "test-idx-all"); // don't refresh test-idx-some it will take 30 sec until it times out... 
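(The rewritten restore block above encodes a behavior change worth spelling out: restoring global cluster state is now rejected up front when it would raise discovery.zen.minimum_master_nodes above the number of master-eligible nodes actually in the cluster. The expectation pattern, reduced to its core; repository, snapshot names, and messages are the ones from the test:)

    // Minimal sketch of the expected-failure pattern for the restore above.
    try {
        client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap")
                .setRestoreGlobalState(true)   // global state carries the persistent settings
                .setWaitForCompletion(true)
                .execute().actionGet();
        fail("can't restore minimum master nodes");
    } catch (IllegalArgumentException ex) {
        // the wrapper names the offending setting, the cause explains the rejection
        assertEquals("illegal value can't update [discovery.zen.minimum_master_nodes] from [1] to [2]", ex.getMessage());
        assertEquals("cannot set discovery.zen.minimum_master_nodes to more than the current master nodes count [1]", ex.getCause().getMessage());
    }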
assertThat(client().prepareSearch("test-idx-all").setSize(0).get().getHits().totalHits(), equalTo(100L)); assertAcked(client().admin().indices().prepareClose("test-idx-closed")); @@ -554,7 +548,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest public void testRestoreIndexWithShardsMissingInLocalGateway() throws Exception { logger.info("--> start 2 nodes"); Settings nodeSettings = settingsBuilder() - .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE) + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) .build(); internalCluster().startNode(nodeSettings); @@ -899,78 +893,6 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest )); } - public static abstract class TestCustomMetaData extends AbstractDiffable<Custom> implements MetaData.Custom { - private final String data; - - protected TestCustomMetaData(String data) { - this.data = data; - } - - public String getData() { - return data; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - TestCustomMetaData that = (TestCustomMetaData) o; - - if (!data.equals(that.data)) return false; - - return true; - } - - @Override - public int hashCode() { - return data.hashCode(); - } - - protected abstract TestCustomMetaData newTestCustomMetaData(String data); - - @Override - public Custom readFrom(StreamInput in) throws IOException { - return newTestCustomMetaData(in.readString()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(getData()); - } - - @Override - public Custom fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token; - String data = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - String currentFieldName = parser.currentName(); - if ("data".equals(currentFieldName)) { - if (parser.nextToken() != XContentParser.Token.VALUE_STRING) { - throw new ElasticsearchParseException("failed to parse snapshottable metadata, invalid data type"); - } - data = parser.text(); - } else { - throw new ElasticsearchParseException("failed to parse snapshottable metadata, unknown field [{}]", currentFieldName); - } - } else { - throw new ElasticsearchParseException("failed to parse snapshottable metadata"); - } - } - if (data == null) { - throw new ElasticsearchParseException("failed to parse snapshottable metadata, data not found"); - } - return newTestCustomMetaData(data); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.field("data", getData()); - return builder; - } - } - static { MetaData.registerPrototype(SnapshottableMetadata.TYPE, SnapshottableMetadata.PROTO); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 57a22c0dd15..669527fd5f3 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import
org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStage; @@ -38,8 +37,8 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -55,7 +54,6 @@ import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -79,7 +77,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.client.Requests.getSnapshotsRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java index 360f549ecbd..eb069d4721c 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java @@ -19,14 +19,13 @@ package org.elasticsearch.snapshots; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.io.FileSystemUtils; diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java index b1e89cdf268..55744697b4d 100644 --- 
a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java +++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java @@ -29,7 +29,6 @@ import com.spatial4j.core.shape.impl.Range; import com.vividsolutions.jts.algorithm.ConvexHull; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; import org.elasticsearch.common.geo.builders.LineStringBuilder; diff --git a/core/src/test/java/org/elasticsearch/test/hamcrest/DoubleMatcher.java b/core/src/test/java/org/elasticsearch/test/hamcrest/DoubleMatcher.java new file mode 100644 index 00000000000..de275eaffca --- /dev/null +++ b/core/src/test/java/org/elasticsearch/test/hamcrest/DoubleMatcher.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test.hamcrest; + + +public class DoubleMatcher { + + /** + * Better floating point comparisons courtesy of https://github.com/brazzy/floating-point-gui.de + * + * Snippet adapted to use doubles instead of floats + */ + public static boolean nearlyEqual(double a, double b, double epsilon) { + final double absA = Math.abs(a); + final double absB = Math.abs(b); + final double diff = Math.abs(a - b); + + if (a == b) { // shortcut, handles infinities + return true; + } else if (a == 0 || b == 0 || diff < Double.MIN_NORMAL) { + // a or b is zero or both are extremely close to it + // relative error is less meaningful here + return diff < (epsilon * Double.MIN_NORMAL); + } else { // use relative error + return diff / Math.min((absA + absB), Double.MAX_VALUE) < epsilon; + } + } +} diff --git a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java index 595e84f41b6..3400f9637ff 100644 --- a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java +++ b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java @@ -25,8 +25,13 @@ import com.spatial4j.core.shape.impl.GeoCircle; import com.spatial4j.core.shape.impl.RectangleImpl; import com.spatial4j.core.shape.jts.JtsGeometry; import com.spatial4j.core.shape.jts.JtsPoint; -import com.vividsolutions.jts.geom.*; -import org.elasticsearch.ElasticsearchParseException; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.Geometry; +import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.MultiLineString; +import com.vividsolutions.jts.geom.MultiPoint; +import com.vividsolutions.jts.geom.MultiPolygon; +import com.vividsolutions.jts.geom.Polygon; import 
org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.builders.ShapeBuilder; @@ -58,29 +63,29 @@ public class ElasticsearchGeoAssertions { } return top; } - + private static int prev(int top, Coordinate...points) { for (int i = 1; i < points.length; i++) { int p = (top + points.length - i) % points.length; if((points[p].x != points[top].x) || (points[p].y != points[top].y)) { return p; - } + } } return -1; } - + private static int next(int top, Coordinate...points) { for (int i = 1; i < points.length; i++) { int n = (top + i) % points.length; if((points[n].x != points[top].x) || (points[n].y != points[top].y)) { return n; - } + } } return -1; } - + private static Coordinate[] fixedOrderedRing(List<Coordinate> coordinates, boolean direction) { - return fixedOrderedRing(coordinates.toArray(new Coordinate[coordinates.size()]), direction); + return fixedOrderedRing(coordinates.toArray(new Coordinate[coordinates.size()]), direction); } private static Coordinate[] fixedOrderedRing(Coordinate[] points, boolean direction) { @@ -105,7 +110,7 @@ public class ElasticsearchGeoAssertions { return points; } } - + } public static void assertEquals(Coordinate c1, Coordinate c2) { @@ -115,7 +120,7 @@ public class ElasticsearchGeoAssertions { private static boolean isRing(Coordinate[] c) { return (c[0].x == c[c.length-1].x) && (c[0].y == c[c.length-1].y); } - + public static void assertEquals(Coordinate[] c1, Coordinate[] c2) { Assert.assertEquals(c1.length, c2.length); @@ -234,7 +239,7 @@ public class ElasticsearchGeoAssertions { public static void assertMultiLineString(Shape shape) { assert(unwrap(shape) instanceof MultiLineString): "expected MultiLineString but found " + unwrap(shape).getClass().getName(); } - + public static void assertDistance(String geohash1, String geohash2, Matcher<Double> match) { GeoPoint p1 = new GeoPoint(geohash1); GeoPoint p2 = new GeoPoint(geohash2); @@ -244,7 +249,7 @@ public class ElasticsearchGeoAssertions { public static void assertDistance(double lat1, double lon1, double lat2, double lon2, Matcher<Double> match) { assertThat(distance(lat1, lon1, lat2, lon2), match); } - + private static double distance(double lat1, double lon1, double lat2, double lon2) { return GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.DEFAULT); } diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java index 3d57c1d5206..e655f452688 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.threadpool; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java deleted file mode 100644 index 3dfca5cb283..00000000000 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements.
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.threadpool; - -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.settings.Validator; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.util.*; - -import static org.junit.Assert.*; - -public class ThreadPoolTypeSettingsValidatorTests extends ESTestCase { - private Validator validator; - - @Before - public void setUp() throws Exception { - super.setUp(); - validator = ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR; - } - - public void testValidThreadPoolTypeSettings() { - for (Map.Entry<String, ThreadPool.ThreadPoolType> entry : ThreadPool.THREAD_POOL_TYPES.entrySet()) { - assertNull(validateSetting(validator, entry.getKey(), entry.getValue().getType())); - } - } - - public void testInvalidThreadPoolTypeSettings() { - for (Map.Entry<String, ThreadPool.ThreadPoolType> entry : ThreadPool.THREAD_POOL_TYPES.entrySet()) { - Set<ThreadPool.ThreadPoolType> set = new HashSet<>(); - set.addAll(Arrays.asList(ThreadPool.ThreadPoolType.values())); - set.remove(entry.getValue()); - ThreadPool.ThreadPoolType invalidThreadPoolType = randomFrom(set.toArray(new ThreadPool.ThreadPoolType[set.size()])); - String expectedMessage = String.format( - Locale.ROOT, - "thread pool type for [%s] can only be updated to [%s] but was [%s]", - entry.getKey(), - entry.getValue().getType(), - invalidThreadPoolType.getType()); - String message = validateSetting(validator, entry.getKey(), invalidThreadPoolType.getType()); - assertNotNull(message); - assertEquals(expectedMessage, message); - } - } - - public void testNonThreadPoolTypeSetting() { - String setting = ThreadPool.THREADPOOL_GROUP + randomAsciiOfLength(10) + "foo"; - String value = randomAsciiOfLength(10); - assertNull(validator.validate(setting, value, ClusterState.PROTO)); - } - - private String validateSetting(Validator validator, String threadPoolName, String value) { - return validator.validate(ThreadPool.THREADPOOL_GROUP + threadPoolName + ".type", value, ClusterState.PROTO); - } -} diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 95ceea1e490..e1b1c4451c9 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.threadpool; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.test.ESTestCase; @@ -35,7 +36,12 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import
static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; /** */ @@ -90,17 +96,19 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool threadPool = null; try { threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType").build()); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setClusterSettings(clusterSettings); - - threadPool.updateSettings( + clusterSettings.applySettings( settingsBuilder() .put("threadpool." + threadPoolName + ".type", invalidThreadPoolType.getType()) .build() ); fail("expected IllegalArgumentException"); } catch (IllegalArgumentException e) { + assertEquals("illegal value can't update [threadpool.] from [{}] to [{" + threadPoolName + ".type=" + invalidThreadPoolType.getType() + "}]", e.getMessage()); assertThat( - e.getMessage(), + e.getCause().getMessage(), is("setting threadpool." + threadPoolName + ".type to " + invalidThreadPoolType.getType() + " is not permitted; must be " + validThreadPoolType.getType())); } finally { terminateThreadPoolIfNeeded(threadPool); @@ -111,14 +119,16 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.CACHED); ThreadPool threadPool = null; try { - threadPool = new ThreadPool( - Settings.settingsBuilder() - .put("name", "testCachedExecutorType").build()); + Settings nodeSettings = Settings.settingsBuilder() + .put("name", "testCachedExecutorType").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setClusterSettings(clusterSettings); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); - threadPool.updateSettings(settingsBuilder() + Settings settings = clusterSettings.applySettings(settingsBuilder() .put("threadpool." + threadPoolName + ".keep_alive", "10m") .build()); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); @@ -134,7 +144,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change keep alive Executor oldExecutor = threadPool.executor(threadPoolName); - threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); + settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); // Make sure keep alive value changed assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L)); assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); @@ -143,7 +153,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); // Set the same keep alive - threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); + settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." 
+ threadPoolName + ".keep_alive", "1m").build()); // Make sure keep alive value didn't change assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L)); assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); @@ -160,11 +170,13 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { ThreadPool threadPool = null; try { - threadPool = new ThreadPool(settingsBuilder() - .put("name", "testCachedExecutorType").build()); + Settings nodeSettings = Settings.settingsBuilder() + .put("name", "testFixedExecutorType").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setClusterSettings(clusterSettings); assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); - - threadPool.updateSettings(settingsBuilder() + Settings settings = clusterSettings.applySettings(settingsBuilder() .put("threadpool." + threadPoolName + ".size", "15") .build()); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); @@ -177,7 +189,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(0L)); // Put old type back - threadPool.updateSettings(Settings.EMPTY); + settings = clusterSettings.applySettings(Settings.EMPTY); assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); // Make sure keep alive value is not used assertThat(info(threadPool, threadPoolName).getKeepAlive(), nullValue()); @@ -190,7 +202,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change size Executor oldExecutor = threadPool.executor(threadPoolName); - threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".size", "10").build()); + settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".size", "10").build()); // Make sure size values changed assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10)); assertThat(info(threadPool, threadPoolName).getMin(), equalTo(10)); @@ -201,8 +213,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); // Change queue capacity - threadPool.updateSettings(settingsBuilder() - .put("threadpool." + threadPoolName + ".queue", "500") + settings = clusterSettings.applySettings(settingsBuilder().put(settings).put("threadpool." + threadPoolName + ".queue", "500") .build()); } finally { terminateThreadPoolIfNeeded(threadPool); @@ -213,9 +224,12 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.SCALING); ThreadPool threadPool = null; try { - threadPool = new ThreadPool(settingsBuilder() + Settings nodeSettings = settingsBuilder() .put("threadpool." 
+ threadPoolName + ".size", 10) - .put("name", "testCachedExecutorType").build()); + .put("name", "testScalingExecutorType").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setClusterSettings(clusterSettings); assertThat(info(threadPool, threadPoolName).getMin(), equalTo(1)); assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10)); assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(5L)); @@ -224,7 +238,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { // Change settings that doesn't require pool replacement Executor oldExecutor = threadPool.executor(threadPoolName); - threadPool.updateSettings(settingsBuilder() + clusterSettings.applySettings(settingsBuilder() .put("threadpool." + threadPoolName + ".keep_alive", "10m") .put("threadpool." + threadPoolName + ".min", "2") .put("threadpool." + threadPoolName + ".size", "15") @@ -248,9 +262,12 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.FIXED); ThreadPool threadPool = null; try { - threadPool = new ThreadPool(Settings.settingsBuilder() + Settings nodeSettings = Settings.settingsBuilder() .put("threadpool." + threadPoolName + ".queue_size", 1000) - .put("name", "testCachedExecutorType").build()); + .put("name", "testCachedExecutorType").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setClusterSettings(clusterSettings); assertEquals(info(threadPool, threadPoolName).getQueueSize().getSingles(), 1000L); final CountDownLatch latch = new CountDownLatch(1); @@ -264,7 +281,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { } } ); - threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".queue_size", 2000).build()); + clusterSettings.applySettings(settingsBuilder().put("threadpool." 
+ threadPoolName + ".queue_size", 2000).build()); assertThat(threadPool.executor(threadPoolName), not(sameInstance(oldExecutor))); assertThat(oldExecutor.isShutdown(), equalTo(true)); assertThat(oldExecutor.isTerminating(), equalTo(true)); @@ -279,12 +296,15 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { public void testCustomThreadPool() throws Exception { ThreadPool threadPool = null; try { - threadPool = new ThreadPool(Settings.settingsBuilder() + Settings nodeSettings = Settings.settingsBuilder() .put("threadpool.my_pool1.type", "scaling") .put("threadpool.my_pool2.type", "fixed") .put("threadpool.my_pool2.size", "1") .put("threadpool.my_pool2.queue_size", "1") - .put("name", "testCustomThreadPool").build()); + .put("name", "testCustomThreadPool").build(); + threadPool = new ThreadPool(nodeSettings); + ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool.setClusterSettings(clusterSettings); ThreadPoolInfo groups = threadPool.info(); boolean foundPool1 = false; boolean foundPool2 = false; @@ -316,7 +336,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { Settings settings = Settings.builder() .put("threadpool.my_pool2.size", "10") .build(); - threadPool.updateSettings(settings); + clusterSettings.applySettings(settings); groups = threadPool.info(); foundPool1 = false; diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index becb61666da..a5b6e08de3f 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -69,12 +70,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { super.setUp(); threadPool = new ThreadPool(getClass().getName()); serviceA = build( - Settings.builder().put("name", "TS_A", TransportService.SETTING_TRACE_LOG_INCLUDE, "", TransportService.SETTING_TRACE_LOG_EXCLUDE, "NOTHING").build(), + Settings.builder().put("name", "TS_A", TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING").build(), version0, new NamedWriteableRegistry() ); nodeA = new DiscoveryNode("TS_A", "TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), version0); serviceB = build( - Settings.builder().put("name", "TS_B", TransportService.SETTING_TRACE_LOG_INCLUDE, "", TransportService.SETTING_TRACE_LOG_EXCLUDE, "NOTHING").build(), + Settings.builder().put("name", "TS_B", TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING").build(), version1, new NamedWriteableRegistry() ); nodeB = new DiscoveryNode("TS_B", "TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), version1); @@ -650,9 +651,10 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { includeSettings = "test"; excludeSettings = 
"DOESN'T_MATCH"; } - - serviceA.applySettings(Settings.builder() - .put(TransportService.SETTING_TRACE_LOG_INCLUDE, includeSettings, TransportService.SETTING_TRACE_LOG_EXCLUDE, excludeSettings) + ClusterSettings service = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + serviceA.setDynamicSettings(service); + service.applySettings(Settings.builder() + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), includeSettings, TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), excludeSettings) .build()); tracer.reset(4); diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java index 3f140b388fd..33d0f6ef528 100644 --- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java +++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java @@ -23,12 +23,12 @@ import org.elasticsearch.Version; import org.elasticsearch.cache.recycler.MockPageCacheRecycler; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty.NettyTransport; @@ -64,7 +64,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase { @Before public void startThreadPool() { threadPool = new ThreadPool(settings); - threadPool.setNodeSettingsService(new NodeSettingsService(settings)); + threadPool.setClusterSettings(new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); NetworkService networkService = new NetworkService(settings); BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(settings, threadPool), new NoneCircuitBreakerService()); nettyTransport = new NettyTransport(settings, threadPool, networkService, bigArrays, Version.CURRENT, new NamedWriteableRegistry()); diff --git a/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java b/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java new file mode 100644 index 00000000000..d587ab05e45 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport; + +import org.elasticsearch.Version; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.transport.AssertingLocalTransport; +import org.elasticsearch.threadpool.ThreadPool; + +/** Unit tests for module registering custom transport and transport service */ +public class TransportModuleTests extends ModuleTestCase { + + + + static class FakeTransport extends AssertingLocalTransport { + @Inject + public FakeTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) { + super(settings, threadPool, version, namedWriteableRegistry); + } + } + + static class FakeTransportService extends TransportService { + @Inject + public FakeTransportService(Settings settings, Transport transport, ThreadPool threadPool) { + super(settings, transport, threadPool); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java b/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java index 1d1ad8d5ba9..d3de3ce59fb 100644 --- a/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java @@ -33,4 +33,4 @@ public class SimpleLocalTransportTests extends AbstractSimpleTransportTestCase { transportService.start(); return transportService; } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java index c84a9eb9a55..55f9bc49df3 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java @@ -21,13 +21,14 @@ package org.elasticsearch.transport.netty; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -40,7 +41,6 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.RequestHandlerRegistry; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportRequest; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelPipeline; @@ -66,7 +66,7 @@ public class NettyTransportIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) 
.put("node.mode", "network") - .put(TransportModule.TRANSPORT_TYPE_KEY, "exception-throwing").build(); + .put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build(); } @Override @@ -99,8 +99,8 @@ public class NettyTransportIT extends ESIntegTestCase { public String description() { return "an exception throwing transport for testing"; } - public void onModule(TransportModule transportModule) { - transportModule.addTransport("exception-throwing", ExceptionThrowingNettyTransport.class); + public void onModule(NetworkModule module) { + module.registerTransport("exception-throwing", ExceptionThrowingNettyTransport.class); } } @@ -146,7 +146,7 @@ public class NettyTransportIT extends ESIntegTestCase { } if (reg.getExecutor() == ThreadPool.Names.SAME) { //noinspection unchecked - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } else { threadPool.executor(reg.getExecutor()).execute(new RequestHandler(reg, request, transportChannel)); } @@ -176,7 +176,7 @@ public class NettyTransportIT extends ESIntegTestCase { @SuppressWarnings({"unchecked"}) @Override protected void doRun() throws Exception { - reg.getHandler().messageReceived(request, transportChannel); + reg.processMessageReceived(request, transportChannel); } @Override diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java index 59ef26c42af..ee49012291d 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java @@ -19,11 +19,12 @@ package org.elasticsearch.transport.netty; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -31,7 +32,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.junit.annotations.Network; -import org.elasticsearch.transport.TransportModule; import java.net.InetAddress; import java.util.Locale; @@ -60,7 +60,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { Settings.Builder builder = settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put("network.host", "127.0.0.1") - .put(TransportModule.TRANSPORT_TYPE_KEY, "netty") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") .put("node.mode", "network") .put("transport.profiles.client1.port", randomPortRange) .put("transport.profiles.client1.publish_host", "127.0.0.7") @@ -72,7 +72,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { public void testThatTransportClientCanConnect() throws Exception { Settings settings = settingsBuilder() .put("cluster.name", 
internalCluster().getClusterName()) - .put(TransportModule.TRANSPORT_TYPE_KEY, "netty") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") .put("path.home", createTempDir().toString()) .build(); try (TransportClient transportClient = TransportClient.builder().settings(settings).build()) { diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java index 3437701f6c9..ea67ce32717 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java @@ -21,22 +21,19 @@ package org.elasticsearch.transport.netty; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.transport.TransportModule; import java.net.Inet4Address; -import java.net.Inet6Address; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.not; /** * Checks that Elasticsearch produces a sane publish_address when it binds to @@ -48,7 +45,7 @@ public class NettyTransportPublishAddressIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(TransportModule.TRANSPORT_TYPE_KEY, "netty") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") .put("node.mode", "network").build(); } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java index 89702118b49..bd26319f4ab 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java @@ -36,6 +36,7 @@ import java.net.UnknownHostException; import static org.hamcrest.Matchers.containsString; public class SimpleNettyTransportTests extends AbstractSimpleTransportTestCase { + @Override protected MockTransportService build(Settings settings, Version version, NamedWriteableRegistry namedWriteableRegistry) { settings = Settings.builder().put(settings).put("transport.tcp.port", "0").build(); @@ -53,4 +54,4 @@ public class SimpleNettyTransportTests extends AbstractSimpleTransportTestCase { assertThat(e.getMessage(), containsString("[localhost/127.0.0.1:9876]")); } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index 28a3dea118e..1350dcbb8ed 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.MasterNotDiscoveredException; 
import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; @@ -47,6 +48,7 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Map; @@ -79,6 +81,11 @@ public class TribeIT extends ESIntegTestCase { return Settings.builder().put(Node.HTTP_ENABLED, false).build(); } + @Override + public Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.emptyList(); + } + @Override public Settings transportClientSettings() { return null; @@ -86,7 +93,7 @@ public class TribeIT extends ESIntegTestCase { }; cluster2 = new InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), 2, 2, - Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, true); + Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList()); cluster2.beforeTest(getRandom(), 0.1); cluster2.ensureAtLeastNumDataNodes(2); diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index a789bb48774..09887d83541 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -120,7 +120,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map<String, Object> params) { return new Object(); // unused } @@ -218,7 +218,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map<String, Object> params) { return script; } @@ -309,7 +309,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map<String, Object> params) { return new Object(); // unused } @@ -400,7 +400,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public Object compile(String script) { + public Object compile(String script, Map<String, Object> params) { return new Object(); // unused } diff --git a/core/src/test/resources/indices/bwc/index-2.0.2.zip b/core/src/test/resources/indices/bwc/index-2.0.2.zip new file mode 100644 index 00000000000..2f77405a831 Binary files /dev/null and b/core/src/test/resources/indices/bwc/index-2.0.2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.1.1.zip b/core/src/test/resources/indices/bwc/index-2.1.1.zip new file mode 100644 index 00000000000..74c967d2c61 Binary files /dev/null and b/core/src/test/resources/indices/bwc/index-2.1.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.2.zip b/core/src/test/resources/indices/bwc/repo-2.0.2.zip new file mode 100644 index 00000000000..696ffd939d5 Binary files /dev/null and b/core/src/test/resources/indices/bwc/repo-2.0.2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.1.1.zip b/core/src/test/resources/indices/bwc/repo-2.1.1.zip new file mode 100644 index 00000000000..3253da62c3f Binary files /dev/null and b/core/src/test/resources/indices/bwc/repo-2.1.1.zip differ diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json
b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json index 6206592afcb..595f62210dd 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json @@ -2,13 +2,10 @@ person:{ properties:{ "name":{ - type:"multi_field", + type:"string", + index:"analyzed", + store:"yes", "fields":{ - "name":{ - type:"string", - index:"analyzed", - store:"yes" - }, "indexed":{ type:"string", index:"analyzed" @@ -22,4 +19,4 @@ } } } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json index 4a8fbf66ced..3cfca9c313e 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json @@ -2,13 +2,10 @@ person:{ properties:{ "name":{ - type:"multi_field", + type:"string", + index:"analyzed", + store:"yes", "fields":{ - "name":{ - type:"string", - index:"analyzed", - store:"yes" - }, "indexed":{ type:"string", index:"analyzed" @@ -27,4 +24,4 @@ } } } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json index 9b309789f50..046b0c234d4 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json @@ -2,7 +2,8 @@ person:{ properties:{ "name":{ - type:"multi_field", + type:"string", + index:"no", "fields":{ "not_indexed3":{ type:"string", @@ -13,4 +14,4 @@ } } } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json deleted file mode 100644 index b099b9ab208..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "person":{ - "properties":{ - "name":{ - "type":"multi_field", - "fields":{ - "name":{ - "type":"string", - "index":"analyzed", - "store":"yes" - }, - "indexed":{ - "type":"string", - "index":"analyzed" - }, - "not_indexed":{ - "type":"string", - "index":"no", - "store":"yes" - }, - "test1" : { - "type":"string", - "index":"analyzed", - "store" : "yes", - "fielddata" : { - "loading" : "eager" - } - }, - "test2" : { - "type" : "token_count", - "store" : "yes", - "index" : "not_analyzed", - "analyzer" : "simple" - } - } - }, - "object1":{ - "properties":{ - "multi1":{ - "type":"multi_field", - "fields":{ - "multi1":{ - "type":"date" - }, - "string":{ - "type":"string", - "index":"not_analyzed" - } - } - } - } - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json b/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json index 252aafefb08..6ddde341fc2 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json +++ 
b/core/src/test/resources/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json @@ -2,7 +2,7 @@ "type": { "_all": { "store": false, - "enabled": false, + "enabled": true, "store_term_vectors": false, "store_term_vector_offsets": false, "store_term_vector_positions": false, @@ -10,7 +10,7 @@ "omit_norms": false, "analyzer": "whitespace", "search_analyzer": "standard", - "similarity": "bm25", + "similarity": "BM25", "fielddata": { "format": "paged_bytes" } diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index 3fa61c4361f..5a94377407d 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -67,14 +67,14 @@ DEFAULT_PLUGINS = ["analysis-icu", "discovery-ec2", "discovery-gce", "discovery-multicast", - "lang-expression", - "lang-groovy", "lang-javascript", "lang-plan-a", "lang-python", + "mapper-attachments", "mapper-murmur3", "mapper-size", "repository-azure", + "repository-hdfs", "repository-s3", "store-smb"] diff --git a/distribution/build.gradle b/distribution/build.gradle index 4da164131f3..fcf22ff48dc 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -17,15 +17,14 @@ * under the License. */ + import org.apache.tools.ant.filters.FixCrLfFilter import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.EmptyDirTask +import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.precommit.DependencyLicensesTask import org.elasticsearch.gradle.precommit.UpdateShasTask import org.elasticsearch.gradle.test.RunTask -import org.elasticsearch.gradle.EmptyDirTask -import org.elasticsearch.gradle.MavenFilteringHack -import org.gradle.api.InvalidUserDataException -import org.gradle.internal.nativeintegration.filesystem.Chmod // for deb/rpm buildscript { diff --git a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java index fd12fd2e519..5537bbfa646 100644 --- a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; diff --git a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java index fd12fd2e519..5537bbfa646 100644 --- a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 5d95f64a15f..00000000000 --- a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9f2b9811a4f4a57a1b3a98bdc1e1b63476b9f628 \ No newline at end of file diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 
b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..2edc39c7029 --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +69e187ef1d2d9c9570363eb4186821e0341df5b8 \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 0ae258b597a..00000000000 --- a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -038071889a5dbeb279e37fa46225e194139a427c \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..0b6a49a68e3 --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +0fa00a45ff9bc6a4df44db81f2e4e44ea94bf88e \ No newline at end of file diff --git a/distribution/licenses/lucene-core-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-core-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index aee7c10cffd..00000000000 --- a/distribution/licenses/lucene-core-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b986d0ad8ee4dda8172a5a61875c47631e4b21d4 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..3ff27aff723 --- /dev/null +++ b/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +f6854c65c7f4c6d9de583f4daa4fd3ae8a3800f1 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index aa1011e007e..00000000000 --- a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f46574fbdfbcc81d936c77e15ba5b3af2c2b7253 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..9ffcb6d07cf --- /dev/null +++ b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +e996e6c723eb415ba2cfa7f5e98bbf194a4918dd \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 561f17e773c..00000000000 --- a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f620262d667a294d390e8df7575cc2cca2626559 \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..b126eebd88f --- /dev/null +++ b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +3b7a5d97b10885f16eb53deb15d64c942b9f9fdb \ No newline at end of file diff --git a/distribution/licenses/lucene-join-5.5.0-snapshot-1719088.jar.sha1 
b/distribution/licenses/lucene-join-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 4735bdf1d2d..00000000000 --- a/distribution/licenses/lucene-join-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c44b07242fd706f6f7f14c9063a725e0e5b98cd \ No newline at end of file diff --git a/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..8313bac1acf --- /dev/null +++ b/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +e4dda3eeb76e340aa4713a3b20d68c4a1504e505 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 9c19a6ad622..00000000000 --- a/distribution/licenses/lucene-memory-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e33e0aa5fc227e90c8314f61b4cba1090035e33 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..1802f859ae0 --- /dev/null +++ b/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +800442a5d7612ce4c8748831871b4d436a50554e \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index c4a61bff68b..00000000000 --- a/distribution/licenses/lucene-misc-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e416893f7b781239a15d3e2c7200ff26574d14de \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..1c543141bbf --- /dev/null +++ b/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +bdf184de9b5773c7af3ae908af78eeb1e512470c \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index edc56751403..00000000000 --- a/distribution/licenses/lucene-queries-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b153b63b9333feedb18af2673eb6ccaf95bcc8bf \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..f3eb218b9e0 --- /dev/null +++ b/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +fc59de52bd2c7e420edfd235723cb8b0dd44e92d \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index eddd3d6cdcd..00000000000 --- a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0aa2758d70a79f2e0f33a87624fd9d31e155c864 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..4ce5c2024f0 --- /dev/null +++ 
b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +1d341e6a4f11f3170773ccffdbe6815b45967e3d \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 571903cc72c..00000000000 --- a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -873c716ba629dae389b12ddb1aedf2f5c5f57fea \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..cf78d108a11 --- /dev/null +++ b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +a1b02c2b595ac92f45f0d2be03841a3a7fcae1f1 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 5e6a27b7cd1..00000000000 --- a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9d7e47c2fb73c614cc5ca41529b2c273c73b0ce7 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..2634a93e82d --- /dev/null +++ b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +e3ea422b56734329fb6974e9cf9f66478adb5793 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index cf841e18c5a..00000000000 --- a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4766305088797a66fe02d5aaa98e086867816e42 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..391d044c719 --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +5eadbd4e63120b59ab6445e39489205f98420471 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1719088.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 1fbb60a9d7a..00000000000 --- a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f0ee6fb780ea8aa9ec6d31e6a9cc7d48700bd2ca \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..f9f2bf5a43c --- /dev/null +++ b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +a336287e65d082535f02a8427666dbe46b1b9b74 \ No newline at end of file diff --git a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java index fd12fd2e519..5537bbfa646 100644 --- a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ 
b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; diff --git a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java index fd12fd2e519..5537bbfa646 100644 --- a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; diff --git a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java index fd12fd2e519..5537bbfa646 100644 --- a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; diff --git a/docs/java-api/admin/cluster/health.asciidoc b/docs/java-api/admin/cluster/health.asciidoc new file mode 100644 index 00000000000..7d20fdde6a3 --- /dev/null +++ b/docs/java-api/admin/cluster/health.asciidoc @@ -0,0 +1,76 @@ +[[java-admin-cluster-health]] +==== Cluster Health + +[[java-admin-cluster-health-health]] +===== Health + +The cluster health API allows you to get a very simple status of the health of the cluster, and can also give you +some technical information about the cluster status per index: + +[source,java] +-------------------------------------------------- +ClusterHealthResponse healths = client.admin().cluster().prepareHealth().get(); <1> +String clusterName = healths.getClusterName(); <2> +int numberOfDataNodes = healths.getNumberOfDataNodes(); <3> +int numberOfNodes = healths.getNumberOfNodes(); <4> + +for (ClusterIndexHealth health : healths) { <5> + String index = health.getIndex(); <6> + int numberOfShards = health.getNumberOfShards(); <7> + int numberOfReplicas = health.getNumberOfReplicas(); <8> + ClusterHealthStatus status = health.getStatus(); <9> +} +-------------------------------------------------- +<1> Get information for all indices +<2> Access the cluster name +<3> Get the total number of data nodes +<4> Get the total number of nodes +<5> Iterate over all indices +<6> Index name +<7> Number of shards +<8> Number of replicas +<9> Index status + +[[java-admin-cluster-health-wait-status]] +===== Wait for status + +You can use the cluster health API to wait for a specific status for the whole cluster or for a given index: + +[source,java] +-------------------------------------------------- +client.admin().cluster().prepareHealth() <1> + .setWaitForYellowStatus() <2> + .get(); +client.admin().cluster().prepareHealth("company") <3> + .setWaitForGreenStatus() <4> + .get(); + +client.admin().cluster().prepareHealth("employee") <5> + .setWaitForGreenStatus() <6> + .setTimeout(TimeValue.timeValueSeconds(2)) <7> + .get(); +-------------------------------------------------- +<1> Prepare a health request +<2> Wait for the cluster to reach yellow status +<3>
Prepare the health request for index `company` +<4> Wait for the index to reach green status +<5> Prepare the health request for index `employee` +<6> Wait for the index to reach green status +<7> Wait at most 2 seconds + +If the index does not have the expected status and you want to fail in that case, you need +to check the result explicitly: + +[source,java] +-------------------------------------------------- +ClusterHealthResponse response = client.admin().cluster().prepareHealth("company") + .setWaitForGreenStatus() <1> + .get(); + +ClusterHealthStatus status = response.getIndices().get("company").getStatus(); +if (!status.equals(ClusterHealthStatus.GREEN)) { + throw new RuntimeException("Index is in " + status + " state"); <2> +} +-------------------------------------------------- +<1> Wait for the index to reach green status +<2> Throw an exception if not `GREEN` diff --git a/docs/java-api/admin/cluster/index.asciidoc b/docs/java-api/admin/cluster/index.asciidoc new file mode 100644 index 00000000000..e5525e18a49 --- /dev/null +++ b/docs/java-api/admin/cluster/index.asciidoc @@ -0,0 +1,15 @@ +[[java-admin-cluster]] +=== Cluster Administration + +To access the cluster Java API, you need to call the `cluster()` method from an <<java-admin,`AdminClient`>>: + +[source,java] +-------------------------------------------------- +ClusterAdminClient clusterAdminClient = client.admin().cluster(); +-------------------------------------------------- + +[NOTE] +In the rest of this guide, we will use `client.admin().cluster()`. + +include::health.asciidoc[] + diff --git a/docs/java-api/admin/index.asciidoc b/docs/java-api/admin/index.asciidoc new file mode 100644 index 00000000000..41599a82c7b --- /dev/null +++ b/docs/java-api/admin/index.asciidoc @@ -0,0 +1,18 @@ +[[java-admin]] +== Java API Administration + +Elasticsearch provides a full Java API to deal with administration tasks. + +To access them, you need to call the `admin()` method from a client to get an `AdminClient`: + +[source,java] +-------------------------------------------------- +AdminClient adminClient = client.admin(); +-------------------------------------------------- + +[NOTE] +In the rest of this guide, we will use `client.admin()`. + +include::indices/index.asciidoc[] + +include::cluster/index.asciidoc[] diff --git a/docs/java-api/admin/indices/create-index.asciidoc b/docs/java-api/admin/indices/create-index.asciidoc new file mode 100644 index 00000000000..34b776bd04e --- /dev/null +++ b/docs/java-api/admin/indices/create-index.asciidoc @@ -0,0 +1,28 @@ +[[java-admin-indices-create-index]] +==== Create Index + +Using an <<java-admin-indices,`IndicesAdminClient`>>, you can create an index with all default settings and no mapping: + +[source,java] +-------------------------------------------------- +client.admin().indices().prepareCreate("twitter").get(); +-------------------------------------------------- + +[float] +[[java-admin-indices-create-index-settings]] +===== Index Settings + +Each index created can have specific settings associated with it.
+ +[source,java] +-------------------------------------------------- +client.admin().indices().prepareCreate("twitter") + .setSettings(Settings.builder() <1> + .put("index.number_of_shards", 3) + .put("index.number_of_replicas", 2) + ) + .get(); <2> +-------------------------------------------------- +<1> Settings for this index +<2> Execute the action and wait for the result + diff --git a/docs/java-api/admin/indices/get-settings.asciidoc b/docs/java-api/admin/indices/get-settings.asciidoc new file mode 100644 index 00000000000..844aaf65ec9 --- /dev/null +++ b/docs/java-api/admin/indices/get-settings.asciidoc @@ -0,0 +1,22 @@ +[[java-admin-indices-get-settings]] +==== Get Settings + +The get settings API allows you to retrieve the settings of one or more indices: + +[source,java] +-------------------------------------------------- +GetSettingsResponse response = client.admin().indices() + .prepareGetSettings("company", "employee").get(); <1> +for (ObjectObjectCursor<String, Settings> cursor : response.getIndexToSettings()) { <2> + String index = cursor.key; <3> + Settings settings = cursor.value; <4> + Integer shards = settings.getAsInt("index.number_of_shards", null); <5> + Integer replicas = settings.getAsInt("index.number_of_replicas", null); <6> +} +-------------------------------------------------- +<1> Get settings for indices `company` and `employee` +<2> Iterate over results +<3> Index name +<4> Settings for the given index +<5> Number of shards for this index +<6> Number of replicas for this index diff --git a/docs/java-api/admin/indices/index.asciidoc b/docs/java-api/admin/indices/index.asciidoc new file mode 100644 index 00000000000..bbd365076c7 --- /dev/null +++ b/docs/java-api/admin/indices/index.asciidoc @@ -0,0 +1,21 @@ +[[java-admin-indices]] +=== Indices Administration + +To access the indices Java API, you need to call the `indices()` method from an <<java-admin,`AdminClient`>>: + +[source,java] +-------------------------------------------------- +IndicesAdminClient indicesAdminClient = client.admin().indices(); +-------------------------------------------------- + +[NOTE] +In the rest of this guide, we will use `client.admin().indices()`. + +include::create-index.asciidoc[] + +include::put-mapping.asciidoc[] + +include::refresh.asciidoc[] + +include::get-settings.asciidoc[] +include::update-settings.asciidoc[] diff --git a/docs/java-api/admin/indices/put-mapping.asciidoc b/docs/java-api/admin/indices/put-mapping.asciidoc new file mode 100644 index 00000000000..9b085975077 --- /dev/null +++ b/docs/java-api/admin/indices/put-mapping.asciidoc @@ -0,0 +1,76 @@ +[[java-admin-indices-put-mapping]] +==== Put Mapping + +The PUT mapping API allows you to add a new type while creating an index: + +[source,java] +-------------------------------------------------- +client.admin().indices().prepareCreate("twitter") <1> + .addMapping("tweet", "{\n" + <2> + " \"tweet\": {\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }") + .get(); +-------------------------------------------------- +<1> <<java-admin-indices-create-index,Creates an index>> called `twitter` +<2> It also adds a `tweet` mapping type.
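+
+For illustration only, a minimal sketch of reading the mapping back to verify that it was applied (this assumes the
+same `client` as above; `prepareGetMappings` is the standard read counterpart of this API):
+
+[source,java]
+--------------------------------------------------
+// Fetch the mappings of the index created above.
+GetMappingsResponse lookup = client.admin().indices()
+        .prepareGetMappings("twitter")   // index to inspect
+        .setTypes("tweet")               // restrict the response to the tweet type
+        .get();
+// getMappings() is keyed by index name, then by type name.
+MappingMetaData tweetMapping = lookup.getMappings().get("twitter").get("tweet");
+--------------------------------------------------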
+ + +The PUT mapping API also allows you to add a new type to an existing index: + +[source,java] +-------------------------------------------------- +client.admin().indices().preparePutMapping("twitter") <1> + .setType("user") <2> + .setSource("{\n" + <3> + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + "}") + .get(); + +// You can also provide the type in the source document +client.admin().indices().preparePutMapping("twitter") + .setType("user") + .setSource("{\n" + + " \"user\":{\n" + <4> + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}") + .get(); +-------------------------------------------------- +<1> Puts a mapping on the existing index called `twitter` +<2> Adds a `user` mapping type. +<3> This `user` has a predefined type +<4> The type can also be provided within the source + +You can use the same API to update an existing mapping: + +[source,java] +-------------------------------------------------- +client.admin().indices().preparePutMapping("twitter") <1> + .setType("tweet") <2> + .setSource("{\n" + <3> + " \"properties\": {\n" + + " \"user_name\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + "}") + .get(); +-------------------------------------------------- +<1> Puts a mapping on the existing index called `twitter` +<2> Updates the `tweet` mapping type. +<3> The `tweet` type now has a new field `user_name` + diff --git a/docs/java-api/admin/indices/refresh.asciidoc b/docs/java-api/admin/indices/refresh.asciidoc new file mode 100644 index 00000000000..856c270daf3 --- /dev/null +++ b/docs/java-api/admin/indices/refresh.asciidoc @@ -0,0 +1,19 @@ +[[java-admin-indices-refresh]] +==== Refresh + +The refresh API allows you to explicitly refresh one or more indices: + +[source,java] +-------------------------------------------------- +client.admin().indices().prepareRefresh().get(); <1> +client.admin().indices() + .prepareRefresh("twitter") <2> + .get(); +client.admin().indices() + .prepareRefresh("twitter", "company") <3> + .get(); +-------------------------------------------------- +<1> Refresh all indices +<2> Refresh one index +<3> Refresh many indices + diff --git a/docs/java-api/admin/indices/update-settings.asciidoc b/docs/java-api/admin/indices/update-settings.asciidoc new file mode 100644 index 00000000000..9c2cba2adf0 --- /dev/null +++ b/docs/java-api/admin/indices/update-settings.asciidoc @@ -0,0 +1,16 @@ +[[java-admin-indices-update-settings]] +==== Update Indices Settings + +You can change index settings by calling: + +[source,java] +-------------------------------------------------- +client.admin().indices().prepareUpdateSettings("twitter") <1> + .setSettings(Settings.builder() <2> + .put("index.number_of_replicas", 0) + ) + .get(); +-------------------------------------------------- +<1> Index to update +<2> Settings + diff --git a/docs/java-api/docs/bulk.asciidoc b/docs/java-api/docs/bulk.asciidoc index 6890f7c49d3..248326700c4 100644 --- a/docs/java-api/docs/bulk.asciidoc +++ b/docs/java-api/docs/bulk.asciidoc @@ -47,6 +47,7 @@ To use it, first create a `BulkProcessor` instance: [source,java] -------------------------------------------------- +import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkProcessor; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -73,6 +74,8 @@ BulkProcessor bulkProcessor = BulkProcessor.builder( .setBulkSize(new ByteSizeValue(1,
ByteSizeUnit.GB)) <6> .setFlushInterval(TimeValue.timeValueSeconds(5)) <7> .setConcurrentRequests(1) <8> + .setBackoffPolicy( + BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(100), 3)) <9> .build(); -------------------------------------------------- <1> Add your elasticsearch client @@ -86,6 +89,10 @@ BulkProcessor bulkProcessor = BulkProcessor.builder( <7> We want to flush the bulk every 5 seconds whatever the number of requests <8> Set the number of concurrent requests. A value of 0 means that only a single request will be allowed to be executed. A value of 1 means 1 concurrent request is allowed to be executed while accumulating new bulk requests. +<9> Set a custom backoff policy which will initially wait for 100ms, increase the wait time exponentially, and retry up to three + times. A retry is attempted whenever one or more bulk item requests fail with an `EsRejectedExecutionException`, + which indicates that there were too few compute resources available to process the request. To disable backoff, + pass `BackoffPolicy.noBackoff()`. Then you can simply add your requests to the `BulkProcessor`: @@ -101,6 +108,7 @@ By default, `BulkProcessor`: * sets bulkSize to `5mb` * does not set flushInterval * sets concurrentRequests to 1 +* sets backoffPolicy to an exponential backoff with 8 retries and a start delay of 50ms. The total wait time is roughly 5.1 seconds. When all documents are loaded to the `BulkProcessor` it can be closed by using `awaitClose` or `close` methods: diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 16403d5c147..012633f1e4b 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -147,3 +147,5 @@ include::percolate.asciidoc[] include::query-dsl.asciidoc[] include::indexed-scripts.asciidoc[] + +include::admin/index.asciidoc[] diff --git a/docs/java-api/indexed-scripts.asciidoc b/docs/java-api/indexed-scripts.asciidoc index 7bfb9f3273f..45d19ae662d 100644 --- a/docs/java-api/indexed-scripts.asciidoc +++ b/docs/java-api/indexed-scripts.asciidoc @@ -10,7 +10,7 @@ and delete indexed scripts and templates. PutIndexedScriptResponse = client.preparePutIndexedScript() .setScriptLang("groovy") .setId("script1") - .setSource("_score * doc['my_numeric_field'].value") + .setSource("script", "_score * doc['my_numeric_field'].value") .execute() .actionGet(); diff --git a/docs/plugins/authors.asciidoc b/docs/plugins/authors.asciidoc index 75b7776ec09..9461ba8dd53 100644 --- a/docs/plugins/authors.asciidoc +++ b/docs/plugins/authors.asciidoc @@ -17,7 +17,7 @@ listed in this documentation for inspiration. ==================================== The example site plugin mentioned above contains all of the scaffolding needed -for integrating with Maven builds. If you don't plan on using Maven, then all +for integrating with Gradle builds. If you don't plan on using Gradle, then all you really need in your plugin is: * The `plugin-descriptor.properties` file @@ -33,14 +33,14 @@ All plugins, be they site or Java plugins, must contain a file called `plugin-descriptor.properties` in the root directory. The format for this file is described in detail here: -https://github.com/elastic/elasticsearch/blob/master/dev-tools/src/main/resources/plugin-metadata/plugin-descriptor.properties[`dev-tools/src/main/resources/plugin-metadata/plugin-descriptor.properties`]. +https://github.com/elastic/elasticsearch/blob/master/buildSrc/src/main/resources/plugin-descriptor.properties[`/buildSrc/src/main/resources/plugin-descriptor.properties`].
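+
+For orientation, a hypothetical minimal descriptor for a Java plugin might look like this (the property names are
+the mandatory elements described below; every value here is a placeholder, not a prescription):
+
+[source,properties]
+--------------------------------------------------
+# plugin-descriptor.properties -- illustrative values only
+description=An example plugin
+version=1.0
+name=example-plugin
+site=false
+jvm=true
+classname=org.example.ExamplePlugin
+java.version=1.8
+elasticsearch.version=2.2.0
+--------------------------------------------------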
Either fill in this template yourself (see https://github.com/lmenezes/elasticsearch-kopf/blob/master/plugin-descriptor.properties[elasticsearch-kopf] -as an example) or, if you are using Elasticsearch's Maven build system, you -can fill in the necessary values in the `pom.xml` for your plugin. For +as an example) or, if you are using Elasticsearch's Gradle build system, you +can fill in the necessary values in the `build.gradle` file for your plugin. For instance, see -https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/pom.xml[`plugins/site-example/pom.xml`]. +https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/build.gradle[`/plugins/site-example/build.gradle`]. [float] ==== Mandatory elements for all plugins @@ -123,13 +123,13 @@ Read more in {ref}/integration-tests.html#changing-node-configuration[Changing N === Java Security permissions Some plugins may need additional security permissions. A plugin can include -the optional `plugin-security.policy` file containing `grant` statements for -additional permissions. Any additional permissions will be displayed to the user -with a large warning, and they will have to confirm them when installing the +the optional `plugin-security.policy` file containing `grant` statements for +additional permissions. Any additional permissions will be displayed to the user +with a large warning, and they will have to confirm them when installing the plugin interactively. So if possible, it is best to avoid requesting any spurious permissions! -If you are using the elasticsearch Maven build system, place this file in +If you are using the elasticsearch Gradle build system, place this file in `src/main/plugin-metadata` and it will be applied during unit tests as well. Keep in mind that the Java security model is stack-based, and the additional diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index 9846b5fbf58..c93419de260 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -64,6 +64,29 @@ cloud: `my_account1` is the default account which will be used by a repository unless you set an explicit one. +You can set the timeout to use when making any single request. It can be defined globally, per account, or both. +Defaults to `5m`. + +[source,yaml] +---- +cloud: + azure: + storage: + timeout: 10s + my_account1: + account: your_azure_storage_account1 + key: your_azure_storage_key1 + default: true + my_account2: + account: your_azure_storage_account2 + key: your_azure_storage_key2 + timeout: 30s +---- + +In this example, the timeout will be 10s for `my_account1` and 30s for `my_account2`. + +[[repository-azure-repository-settings]] +===== Repository settings The Azure repository supports following settings: @@ -155,6 +178,22 @@ client.admin().cluster().preparePutRepository("my_backup_java1") ).get(); ---- +[[repository-azure-global-settings]] +===== Global repositories settings + +All of these repository settings can also be defined globally in the `elasticsearch.yml` file using the prefix +`repositories.azure.`.
For example: + +[source,yaml] +---- +repositories.azure: + container: backup-container + base_path: backups + chunk_size: 32m + compress: true +---- + + [[repository-azure-validation]] ===== Repository validation rules diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index ea13e5ad3a6..28abaf78f66 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -8,29 +8,25 @@ The HDFS repository plugin adds support for using HDFS File System as a reposito [float] ==== Installation -This plugin can be installed using the plugin manager: +This plugin can be installed through the plugin manager: [source,sh] ---------------------------------------------------------------- sudo bin/plugin install repository-hdfs -sudo bin/plugin install repository-hdfs-hadoop2 -sudo bin/plugin install repository-hdfs-lite ---------------------------------------------------------------- -The plugin must be installed on every node in the cluster, and each node must +The plugin must be installed on _every_ node in the cluster, and each node must be restarted after installation. [[repository-hdfs-remove]] [float] ==== Removal -The plugin can be removed with the following command: +The plugin can be removed by specifying the _installed_ package: [source,sh] ---------------------------------------------------------------- sudo bin/plugin remove repository-hdfs -sudo bin/plugin remove repository-hdfs-hadoop2 -sudo bin/plugin remove repository-hdfs-lite ---------------------------------------------------------------- The node must be stopped before removing the plugin. @@ -38,48 +34,14 @@ The node must be stopped before removing the plugin. [[repository-hdfs-usage]] ==== Getting started with HDFS -The HDFS snapshot/restore plugin comes in three _flavors_: +The HDFS snapshot/restore plugin is built against the latest Apache Hadoop 2.x (currently 2.7.1). If the distro you are using is not protocol +compatible with Apache Hadoop, consider replacing the Hadoop libraries inside the plugin folder with your own (you might have to adjust the security permissions required). -* Default / Hadoop 1.x:: -The default version contains the plugin jar alongside Apache Hadoop 1.x (stable) dependencies. -* YARN / Hadoop 2.x:: -The `hadoop2` version contains the plugin jar plus the Apache Hadoop 2.x (also known as YARN) dependencies. -* Lite:: -The `lite` version contains just the plugin jar, without any Hadoop dependencies. The user should provide these (read below). +Even if Hadoop is already installed on the Elasticsearch nodes, for security reasons, the required libraries need to be placed under the plugin folder. Note that in most cases, if the distro is compatible, one simply needs to configure the repository with the appropriate Hadoop configuration files (see below). -[[repository-hdfs-flavor]] -===== What version to use? - -It depends on whether Hadoop is locally installed or not and if not, whether it is compatible with Apache Hadoop clients. - -* Are you using Apache Hadoop (or a _compatible_ distro) and do not have installed on the Elasticsearch nodes?:: -+ -If the answer is yes, for Apache Hadoop 1 use the default `repository-hdfs` or `repository-hdfs-hadoop2` for Apache Hadoop 2. -+ -* If you are have Hadoop installed locally on the Elasticsearch nodes or are using a certain distro:: -+ -Use the `lite` version and place your Hadoop _client_ jars and their dependencies in the plugin folder under `hadoop-libs`.
-For large deployments, it is recommended to package the libraries in the plugin zip and deploy it manually across nodes -(and thus avoiding having to do the libraries setup on each node). - -[[repository-hdfs-security]] -==== Handling JVM Security and Permissions - -Out of the box, Elasticsearch runs in a JVM with the security manager turned _on_ to make sure that unsafe or sensitive actions -are allowed only from trusted code. Hadoop however is not really designed to run under one; it does not rely on privileged blocks -to execute sensitive code, of which it uses plenty. - -The `repository-hdfs` plugin provides the necessary permissions for both Apache Hadoop 1.x and 2.x (latest versions) to successfully -run in a secured JVM as one can tell from the number of permissions required when installing the plugin. -However using a certain Hadoop File-System (outside DFS), a certain distro or operating system (in particular Windows), might require -additional permissions which are not provided by the plugin. - -In this case there are several workarounds: -* add the permission into `plugin-security.policy` (available in the plugin folder) -* disable the security manager through `es.security.manager.enabled=false` configurations setting - NOT RECOMMENDED - -If you find yourself in such a situation, please let us know what Hadoop distro version and OS you are using and what permission is missing -by raising an issue. Thank you! +Windows Users:: +Using Apache Hadoop on Windows is problematic and thus it is not recommended. For those _really_ wanting to use it, make sure you place the elusive `winutils.exe` under the +plugin folder and point `HADOOP_HOME` variable to it; this should minimize the amount of permissions Hadoop requires (though one would still have to add some more). [[repository-hdfs-config]] ==== Configuration Properties @@ -87,29 +49,17 @@ by raising an issue. Thank you! Once installed, define the configuration for the `hdfs` repository through `elasticsearch.yml` or the {ref}/modules-snapshots.html[REST API]: -[source] +[source,yaml] ---- repositories hdfs: - uri: "hdfs://<host>:<port>/" # optional - Hadoop file-system URI - path: "some/path" # required - path with the file-system where data is stored/loaded - load_defaults: "true" # optional - whether to load the default Hadoop configuration (default) or not - conf_location: "extra-cfg.xml" # optional - Hadoop configuration XML to be loaded (use commas for multi values) - conf.<key> : "<value>" # optional - 'inlined' key=value added to the Hadoop configuration - concurrent_streams: 5 # optional - the number of concurrent streams (defaults to 5) - compress: "false" # optional - whether to compress the metadata or not (default) - chunk_size: "10mb" # optional - chunk size (disabled by default) + uri: "hdfs://<host>:<port>/" \# required - HDFS address only + path: "some/path" \# required - path within the file-system where data is stored/loaded + load_defaults: "true" \# optional - whether to load the default Hadoop configuration (default) or not + conf_location: "extra-cfg.xml" \# optional - Hadoop configuration XML to be loaded (use commas for multi values) + conf.<key> : "<value>" \# optional - 'inlined' key=value added to the Hadoop configuration + concurrent_streams: 5 \# optional - the number of concurrent streams (defaults to 5) + compress: "false" \# optional - whether to compress the metadata or not (default) + chunk_size: "10mb" \# optional - chunk size (disabled by default) + ---- - -NOTE: Be careful when including a paths within the `uri` setting; Some implementations ignore them completely while -others consider them. In general, we recommend keeping the `uri` to a minimum and using the `path` element instead. - -===== Plugging other file-systems - -Any HDFS-compatible file-systems (like Amazon `s3://` or Google `gs://`) can be used as long as the proper Hadoop -configuration is passed to the Elasticsearch plugin. In practice, this means making sure the correct Hadoop configuration -files (`core-site.xml` and `hdfs-site.xml`) and its jars are available in plugin classpath, just as you would with any -other Hadoop client or job. - -Otherwise, the plugin will only read the _default_, vanilla configuration of Hadoop and will not be able to recognized -the plugged-in file-system. diff --git a/docs/plugins/repository.asciidoc b/docs/plugins/repository.asciidoc index 554fa34b033..5706fc74c12 100644 --- a/docs/plugins/repository.asciidoc +++ b/docs/plugins/repository.asciidoc @@ -18,10 +18,9 @@ The S3 repository plugin adds support for using S3 as a repository. The Azure repository plugin adds support for using Azure as a repository. -https://github.com/elastic/elasticsearch-hadoop/tree/master/repository-hdfs[Hadoop HDFS Repository]:: +<<repository-hdfs,Hadoop HDFS Repository>>:: -The Hadoop HDFS Repository plugin adds support for using an HDFS file system -as a repository. +The Hadoop HDFS Repository plugin adds support for using HDFS as a repository. [float] @@ -40,3 +39,5 @@ include::repository-azure.asciidoc[] include::repository-s3.asciidoc[] +include::repository-hdfs.asciidoc[] + diff --git a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc index 3e81e99eb99..322dccb790f 100644 --- a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc @@ -46,19 +46,18 @@ Response: "buckets" : { "errors" : { "doc_count" : 34, - "monthly" : { - "buckets" : [ - ... // the histogram monthly breakdown - ] - } - }, - "warnings" : { - "doc_count" : 439, - "monthly" : { - "buckets" : [ - ... // the histogram monthly breakdown - ] - } + "monthly" : { + "buckets" : [ + ... // the histogram monthly breakdown + ] + } + }, + "warnings" : { + "doc_count" : 439, + "monthly" : { + "buckets" : [ + ... // the histogram monthly breakdown + ] } } } diff --git a/docs/reference/aggregations/misc.asciidoc b/docs/reference/aggregations/misc.asciidoc index 074a6eaaca3..1be8373c1ee 100644 --- a/docs/reference/aggregations/misc.asciidoc +++ b/docs/reference/aggregations/misc.asciidoc @@ -44,7 +44,7 @@ Consider this example where we want to associate the color blue with our `terms`
- aggs": { + "aggs": { "titles": { "terms": { "field": "title" diff --git a/docs/reference/analysis.asciidoc b/docs/reference/analysis.asciidoc index 7009ca35444..8461b5c010f 100644 --- a/docs/reference/analysis.asciidoc +++ b/docs/reference/analysis.asciidoc @@ -73,5 +73,3 @@ include::analysis/tokenfilters.asciidoc[] include::analysis/charfilters.asciidoc[] -include::analysis/icu-plugin.asciidoc[] - diff --git a/docs/reference/analysis/charfilters.asciidoc b/docs/reference/analysis/charfilters.asciidoc index a40cfffc054..c9f5805284c 100644 --- a/docs/reference/analysis/charfilters.asciidoc +++ b/docs/reference/analysis/charfilters.asciidoc @@ -3,7 +3,7 @@ Character filters are used to preprocess the string of characters before it is passed to the <>. -A character filter may be used to strip out HTML markup, , or to convert +A character filter may be used to strip out HTML markup, or to convert `"&"` characters to the word `"and"`. Elasticsearch has built in characters filters which can be diff --git a/docs/reference/analysis/icu-plugin.asciidoc b/docs/reference/analysis/icu-plugin.asciidoc deleted file mode 100644 index 9c979464c80..00000000000 --- a/docs/reference/analysis/icu-plugin.asciidoc +++ /dev/null @@ -1,246 +0,0 @@ -[[analysis-icu-plugin]] -== ICU Analysis Plugin - -The http://icu-project.org/[ICU] analysis plugin allows for unicode -normalization, collation and folding. The plugin is called -https://github.com/elasticsearch/elasticsearch-analysis-icu[elasticsearch-analysis-icu]. - -The plugin includes the following analysis components: - -[float] -[[icu-normalization]] -=== ICU Normalization - -Normalizes characters as explained -http://userguide.icu-project.org/transforms/normalization[here]. It -registers itself by default under `icu_normalizer` or `icuNormalizer` -using the default settings. Allows for the name parameter to be provided -which can include the following values: `nfc`, `nfkc`, and `nfkc_cf`. -Here is a sample settings: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "normalization" : { - "tokenizer" : "keyword", - "filter" : ["icu_normalizer"] - } - } - } - } -} --------------------------------------------------- - -[float] -[[icu-folding]] -=== ICU Folding - -Folding of unicode characters based on `UTR#30`. It registers itself -under `icu_folding` and `icuFolding` names. -The filter also does lowercasing, which means the lowercase filter can -normally be left out. Sample setting: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "folding" : { - "tokenizer" : "keyword", - "filter" : ["icu_folding"] - } - } - } - } -} --------------------------------------------------- - -[float] -[[icu-filtering]] -==== Filtering - -The folding can be filtered by a set of unicode characters with the -parameter `unicodeSetFilter`. This is useful for a non-internationalized -search engine where retaining a set of national characters which are -primary letters in a specific language is wanted. See syntax for the -UnicodeSet -http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[here]. - -The Following example exempts Swedish characters from the folding. Note -that the filtered characters are NOT lowercased which is why we add that -filter below. 
- -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "folding" : { - "tokenizer" : "standard", - "filter" : ["my_icu_folding", "lowercase"] - } - } - "filter" : { - "my_icu_folding" : { - "type" : "icu_folding" - "unicodeSetFilter" : "[^åäöÅÄÖ]" - } - } - } - } -} --------------------------------------------------- - -[float] -[[icu-collation]] -=== ICU Collation - -Uses collation token filter. Allows to either specify the rules for -collation (defined -http://www.icu-project.org/userguide/Collate_Customization.html[here]) -using the `rules` parameter (can point to a location or expressed in the -settings, location can be relative to config location), or using the -`language` parameter (further specialized by country and variant). By -default registers under `icu_collation` or `icuCollation` and uses the -default locale. - -Here is a sample settings: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "collation" : { - "tokenizer" : "keyword", - "filter" : ["icu_collation"] - } - } - } - } -} --------------------------------------------------- - -And here is a sample of custom collation: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "collation" : { - "tokenizer" : "keyword", - "filter" : ["myCollator"] - } - }, - "filter" : { - "myCollator" : { - "type" : "icu_collation", - "language" : "en" - } - } - } - } -} --------------------------------------------------- - -[float] -==== Options - -[horizontal] -`strength`:: - The strength property determines the minimum level of difference considered significant during comparison. - The default strength for the Collator is `tertiary`, unless specified otherwise by the locale used to create the Collator. - Possible values: `primary`, `secondary`, `tertiary`, `quaternary` or `identical`. - + - See http://icu-project.org/apiref/icu4j/com/ibm/icu/text/Collator.html[ICU Collation] documentation for a more detailed - explanation for the specific values. - -`decomposition`:: - Possible values: `no` or `canonical`. Defaults to `no`. Setting this decomposition property with - `canonical` allows the Collator to handle un-normalized text properly, producing the same results as if the text were - normalized. If `no` is set, it is the user's responsibility to insure that all text is already in the appropriate form - before a comparison or before getting a CollationKey. Adjusting decomposition mode allows the user to select between - faster and more complete collation behavior. Since a great many of the world's languages do not require text - normalization, most locales set `no` as the default decomposition mode. - -[float] -==== Expert options: - -[horizontal] -`alternate`:: - Possible values: `shifted` or `non-ignorable`. Sets the alternate handling for strength `quaternary` - to be either shifted or non-ignorable. What boils down to ignoring punctuation and whitespace. - -`caseLevel`:: - Possible values: `true` or `false`. Default is `false`. Whether case level sorting is required. When - strength is set to `primary` this will ignore accent differences. - -`caseFirst`:: - Possible values: `lower` or `upper`. Useful to control which case is sorted first when case is not ignored - for strength `tertiary`. - -`numeric`:: - Possible values: `true` or `false`. Whether digits are sorted according to numeric representation. 
For - example the value `egg-9` is sorted before the value `egg-21`. Defaults to `false`. - -`variableTop`:: - Single character or contraction. Controls what is variable for `alternate`. - -`hiraganaQuaternaryMode`:: - Possible values: `true` or `false`. Defaults to `false`. Distinguishing between Katakana and - Hiragana characters in `quaternary` strength . - -[float] -=== ICU Tokenizer - -Breaks text into words according to UAX #29: Unicode Text Segmentation ((http://www.unicode.org/reports/tr29/)). - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "collation" : { - "tokenizer" : "icu_tokenizer", - } - } - } - } -} --------------------------------------------------- - - -[float] -=== ICU Normalization CharFilter - -Normalizes characters as explained http://userguide.icu-project.org/transforms/normalization[here]. -It registers itself by default under `icu_normalizer` or `icuNormalizer` using the default settings. -Allows for the name parameter to be provided which can include the following values: `nfc`, `nfkc`, and `nfkc_cf`. -Allows for the mode parameter to be provided which can include the following values: `compose` and `decompose`. -Use `decompose` with `nfc` or `nfkc`, to get `nfd` or `nfkd`, respectively. -Here is a sample settings: - -[source,js] --------------------------------------------------- -{ - "index" : { - "analysis" : { - "analyzer" : { - "collation" : { - "tokenizer" : "keyword", - "char_filter" : ["icu_normalizer"] - } - } - } - } -} --------------------------------------------------- diff --git a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc index b5d1b5cde10..c1e278b2183 100644 --- a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc @@ -3,7 +3,7 @@ The `cjk_bigram` token filter forms bigrams out of the CJK terms that are generated by the <> -or the `icu_tokenizer` (see <>). +or the `icu_tokenizer` (see {plugins}/analysis-icu-tokenizer.html[`analysis-icu` plugin]). By default, when a CJK character has no adjacent characters to form a bigram, it is output in unigram form. If you always want to output both unigrams and diff --git a/docs/reference/analysis/tokenfilters/cjk-width-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/cjk-width-tokenfilter.asciidoc index 4f5d55d4de1..21bde5509a6 100644 --- a/docs/reference/analysis/tokenfilters/cjk-width-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/cjk-width-tokenfilter.asciidoc @@ -7,6 +7,6 @@ The `cjk_width` token filter normalizes CJK width differences: * Folds halfwidth Katakana variants into the equivalent Kana NOTE: This token filter can be viewed as a subset of NFKC/NFKD -Unicode normalization. See the <> +Unicode normalization. See the {plugins}/analysis-icu-normalization-charfilter.html[`analysis-icu` plugin] for full normalization support. diff --git a/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc index 293b51a0331..b64f5edbeb9 100644 --- a/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc @@ -3,9 +3,7 @@ Named `delimited_payload_filter`. Splits tokens into tokens and payload whenever a delimiter character is found. 
-Example: "the|1 quick|2 fox|3" is split per default int to tokens `fox`, `quick` and `the` with payloads `1`, `2` and `3` respectively. - - +Example: "the|1 quick|2 fox|3" is split by default into tokens `the`, `quick`, and `fox` with payloads `1`, `2`, and `3` respectively. Parameters: diff --git a/docs/reference/analysis/tokenfilters/edgengram-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/edgengram-tokenfilter.asciidoc index 3ba0edeb8ef..be37d24f7dd 100644 --- a/docs/reference/analysis/tokenfilters/edgengram-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/edgengram-tokenfilter.asciidoc @@ -11,6 +11,6 @@ filter type: |Setting |Description |`min_gram` |Defaults to `1`. |`max_gram` |Defaults to `2`. -|`side` |Either `front` or `back`. Defaults to `front`. +|`side` |deprecated. Either `front` or `back`. Defaults to `front`. |====================================================== diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index 08f4c900597..8ec58424730 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -38,6 +38,44 @@ last example will be: }' -------------------------------------------------- +Resetting persistent or transient settings can be done by assigning a +`null` value. If a transient setting is reset, the persistent setting +is applied if available. Otherwise Elasticsearch will fallback to the setting +defined at the configuration file or, if not existent, to the default +value. Here is an example: + +[source,js] +-------------------------------------------------- +curl -XPUT localhost:9200/_cluster/settings -d '{ + "transient" : { + "discovery.zen.minimum_master_nodes" : null + } +}' +-------------------------------------------------- + +Reset settings will not be included in the cluster response. So +the response for the last example will be: + +[source,js] +-------------------------------------------------- +{ + "persistent" : {}, + "transient" : {} +} +-------------------------------------------------- + +Settings can also be reset using simple wildcards. For instance to reset +all dynamic `discovery.zen` setting a prefix can be used: + +[source,js] +-------------------------------------------------- +curl -XPUT localhost:9200/_cluster/settings -d '{ + "transient" : { + "discovery.zen.*" : null + } +}' +-------------------------------------------------- + Cluster wide settings can be returned using: [source,js] diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 7d2dbfaeede..25a4f34fa46 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -223,6 +223,7 @@ Can't be used to update the routing of an existing document. Parent is used to route the update request to the right shard and sets the parent for the upsert request if the document being updated doesn't exist. Can't be used to update the `parent` of an existing document. +If an alias index routing is specified then it overrides the parent routing and it is used to route the request. `timeout`:: diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index a2b73a44842..56e9d4ddb91 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -129,19 +129,6 @@ specific index module: experimental[] Disables the purge of <> on the current index. 
-[[index.recovery.initial_shards]]`index.recovery.initial_shards`:: -+ --- -A primary shard is only recovered only if there are enough nodes available to -allocate sufficient replicas to form a quorum. It can be set to: - - * `quorum` (default) - * `quorum-1` (or `half`) - * `full` - * `full-1`. - * Number values are also supported, e.g. `1`. --- - [float] === Settings in other index modules diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc index ddec26b8030..df37e7876c1 100644 --- a/docs/reference/index-modules/similarity.asciidoc +++ b/docs/reference/index-modules/similarity.asciidoc @@ -48,10 +48,10 @@ Here we configure the DFRSimilarity so it can be referenced as === Available similarities [float] -[[default-similarity]] -==== Default similarity +[[classic-similarity]] +==== Classic similarity -The default similarity that is based on the TF/IDF model. This +The classic similarity, which is based on the TF/IDF model. This similarity has the following option: `discount_overlaps`:: @@ -59,7 +59,7 @@ similarity has the following option: 0 position increment) are ignored when computing norm. By default this is true, meaning overlap tokens do not count when computing norms. -Type name: `default` +Type name: `classic` [float] [[bm25]] diff --git a/docs/reference/index-modules/translog.asciidoc b/docs/reference/index-modules/translog.asciidoc index ad704299529..de72bed7ac8 100644 --- a/docs/reference/index-modules/translog.asciidoc +++ b/docs/reference/index-modules/translog.asciidoc @@ -20,7 +20,6 @@ replaying its operations take a considerable amount of time during recovery. It is also exposed through an API, though its rarely needed to be performed manually. - [float] === Flush settings @@ -31,10 +30,6 @@ control how often the in-memory buffer is flushed to disk: Once the translog hits this size, a flush will happen. Defaults to `512mb`. -`index.translog.flush_threshold_ops`:: - -After how many operations to flush. Defaults to `unlimited`. - [float] === Translog settings @@ -75,25 +70,4 @@ update, or bulk request. This setting accepts the following parameters: `fsync` and commit in the background every `sync_interval`. In the event of hardware failure, all acknowledged writes since the last automatic commit will be discarded. --- - -`index.translog.fs.type`:: -+ --- - -Whether to buffer writes to the transaction log in memory or not. This -setting accepts the following parameters: -`buffered`:: - - (default) Translog writes first go to a 64kB buffer in memory, - and are only written to the disk when the buffer is full, or when an - `fsync` is triggered by a write request or the `sync_interval`. - -`simple`:: - - Translog writes are written to the file system immediately, without - buffering. However, these writes will only be persisted to disk when an - `fsync` and commit is triggered by a write request or the `sync_interval`. - --- +-- \ No newline at end of file diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 57faa9718f9..78e871de232 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -193,8 +193,8 @@ curl -XPOST 'http://localhost:9200/_aliases' -d ' As shown in the example above, search routing may contain several values separated by comma. Index routing can contain only a single value. 
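For context, an alias carrying both kinds of routing might be registered along these lines (a hedged sketch; the index name `test` is an assumption, while `alias2` matches the example referenced just below):

[source,js]
--------------------------------------------------
curl -XPOST 'http://localhost:9200/_aliases' -d '
{
    "actions" : [
        {
            "add" : {
                 "index" : "test", <1>
                 "alias" : "alias2",
                 "search_routing" : "1,2", <2>
                 "index_routing" : "2" <3>
            }
        }
    ]
}'
--------------------------------------------------
<1> `test` is a placeholder index name.
<2> Search routing may contain several comma-separated values.
<3> Index routing is restricted to a single value.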
-If an operation that uses routing alias also has a routing parameter, an -intersection of both alias routing and routing specified in the +If a search operation that uses a routing alias also has a routing parameter, an +intersection of both search alias routing and routing specified in the parameter is used. For example the following command will use "2" as a routing value: @@ -203,6 +203,9 @@ routing value: curl -XGET 'http://localhost:9200/alias2/_search?q=user:kimchy&routing=2,3' -------------------------------------------------- +If an index operation that uses an alias with index routing also has a parent routing, the +parent routing is ignored. + [float] [[alias-adding]] === Add a single alias diff --git a/docs/reference/indices/shadow-replicas.asciidoc b/docs/reference/indices/shadow-replicas.asciidoc index da74a651242..0d589adb64a 100644 --- a/docs/reference/indices/shadow-replicas.asciidoc +++ b/docs/reference/indices/shadow-replicas.asciidoc @@ -104,9 +104,8 @@ settings API: `index.shared_filesystem.recover_on_any_node`:: Boolean value indicating whether the primary shards for the index should be - allowed to recover on any node in the cluster, regardless of the number of - replicas or whether the node has previously had the shard allocated to it - before. Defaults to `false`. + allowed to recover on any node in the cluster. If a node holding a copy of + the shard is found, recovery prefers that node. Defaults to `false`. === Node level settings related to shadow replicas diff --git a/docs/reference/indices/shard-stores.asciidoc b/docs/reference/indices/shard-stores.asciidoc index d4d385bd6dc..19acbc44d3f 100644 --- a/docs/reference/indices/shard-stores.asciidoc +++ b/docs/reference/indices/shard-stores.asciidoc @@ -52,8 +52,9 @@ The shard stores information is grouped by indices and shard ids. } }, "version": 4, <4> + "allocation_id": "2iNySv_OQVePRX-yaRH_lQ", <5> "allocation" : "primary" | "replica" | "unused", <6> - "store_exception": ... <5> + "store_exception": ... <7> }, ... ] @@ -66,7 +67,8 @@ The shard stores information is grouped by indices and shard ids. <3> The node information that hosts a copy of the store, the key is the unique node id. <4> The version of the store copy -<5> The status of the store copy, whether it is used as a +<5> The allocation id of the store copy +<6> The status of the store copy, whether it is used as a primary, replica or not used at all -<6> Any exception encountered while opening the shard index or +<7> Any exception encountered while opening the shard index or from earlier engine failure diff --git a/docs/reference/mapping/params/similarity.asciidoc b/docs/reference/mapping/params/similarity.asciidoc index 393f654bcf1..a3fdef1d43b 100644 --- a/docs/reference/mapping/params/similarity.asciidoc +++ b/docs/reference/mapping/params/similarity.asciidoc @@ -15,7 +15,7 @@ similarities. For more details about this expert options, see the The only similarities which can be used out of the box, without any further configuration are: -`default`:: +`classic`:: The Default TF/IDF algorithm used by Elasticsearch and Lucene. See {defguide}/practical-scoring-function.html[Lucene’s Practical Scoring Function] for more information. @@ -49,6 +49,6 @@ PUT my_index } -------------------------------------------------- // AUTOSENSE -<1> The `default_field` uses the `default` similarity (ie TF/IDF). +<1> The `default_field` uses the `classic` similarity (i.e. TF/IDF). <2> The `bm25_field` uses the `BM25` similarity. 
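To make the renamed similarity concrete, a mapping along the following lines (a sketch; the type name `my_type` is illustrative, and the field names are borrowed from the callouts above) would score one field with `classic` TF/IDF and another with `BM25`:

[source,js]
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "default_field": {
          "type": "string" <1>
        },
        "bm25_field": {
          "type": "string",
          "similarity": "BM25" <2>
        }
      }
    }
  }
}
--------------------------------------------------
<1> With no explicit `similarity`, the field falls back to `classic` (TF/IDF).
<2> The `bm25_field` opts into the `BM25` similarity.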
diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index 07f87037b07..b4bb06e236c 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -66,7 +66,7 @@ GET my_index/_search ==== Using `nested` fields for arrays of objects If you need to index arrays of objects and to maintain the independence of -each object in the array, you should used the `nested` datatype instead of the +each object in the array, you should use the `nested` datatype instead of the <> datatype. Internally, nested objects index each object in the array as a separate hidden document, meaning that each nested object can be queried independently of the others, with the <>: @@ -110,7 +110,7 @@ GET my_index/_search "bool": { "must": [ { "match": { "user.first": "Alice" }}, - { "match": { "user.last": "White" }} <2> + { "match": { "user.last": "Smith" }} <2> ] } } @@ -127,7 +127,7 @@ GET my_index/_search "bool": { "must": [ { "match": { "user.first": "Alice" }}, - { "match": { "user.last": "Smith" }} <3> + { "match": { "user.last": "White" }} <3> ] } }, @@ -137,14 +137,14 @@ GET my_index/_search "user.first": {} } } - } + } } } -------------------------------------------------- // AUTOSENSE <1> The `user` field is mapped as type `nested` instead of type `object`. -<2> This query doesn't match because `Alice` and `White` are not in the same nested object. +<2> This query doesn't match because `Alice` and `Smith` are not in the same nested object. <3> This query matches because `Alice` and `White` are in the same nested object. <4> `inner_hits` allow us to highlight the matching nested documents. diff --git a/docs/reference/mapping/types/string.asciidoc b/docs/reference/mapping/types/string.asciidoc index 95c682c696f..557f77d9b38 100644 --- a/docs/reference/mapping/types/string.asciidoc +++ b/docs/reference/mapping/types/string.asciidoc @@ -166,7 +166,7 @@ Defaults depend on the <> setting: <>:: Which scoring algorithm or _similarity_ should be used. Defaults - to `default`, which uses TF/IDF. + to `classic`, which uses TF/IDF. <>:: diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 6588f22a85a..190f440a8fd 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -14,6 +14,7 @@ your application to Elasticsearch 3.0. * <> * <> * <> +* <> [[breaking_30_search_changes]] === Search changes @@ -199,6 +200,34 @@ If you are using any of these settings please take the time and review their pur _expert settings_ and should only be used if absolutely necessary. If you have set any of the above setting as persistent cluster settings please use the settings update API and set their superseded keys accordingly. +The following settings have been removed without replacement: + + * `indices.recovery.concurrent_small_file_streams` - recoveries are now single threaded. The number of concurrent outgoing recoveries is throttled via allocation deciders. + * `indices.recovery.concurrent_file_streams` - recoveries are now single threaded. The number of concurrent outgoing recoveries is throttled via allocation deciders. + +==== Translog settings + +The `index.translog.flush_threshold_ops` setting is no longer supported. In order to control flushes based on the transaction log +growth, use `index.translog.flush_threshold_size` instead (see the sketch below). 
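As a sketch of that replacement (the index name `my_index` is an assumption), the size-based threshold can be updated dynamically through the index settings API:

[source,js]
--------------------------------------------------
curl -XPUT 'localhost:9200/my_index/_settings' -d '{
    "index.translog.flush_threshold_size" : "1gb" <1>
}'
--------------------------------------------------
<1> Flush once the translog exceeds this size, rather than after a fixed number of operations.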
Changing the translog type with `index.translog.fs.type` is not supported +anymore; the `buffered` implementation is now the only available option and uses a fixed `8kb` buffer. + +==== Request Cache Settings + +The deprecated settings `index.cache.query.enable` and `indices.cache.query.size` have been removed and are replaced with +`index.requests.cache.enable` and `indices.requests.cache.size` respectively. + +==== Allocation settings + +Allocation settings deprecated in 1.x have been removed: + + * `cluster.routing.allocation.concurrent_recoveries` is superseded by `cluster.routing.allocation.node_concurrent_recoveries` + +Please change the setting in your configuration files or in the cluster state to use the new settings instead. + +==== Similarity settings + +The 'default' similarity has been renamed to 'classic'. + [[breaking_30_mapping_changes]] === Mapping changes @@ -220,6 +249,20 @@ will still be accepted for indices created before the upgrade to 3.0 for backwar compatibility, but it will have no effect. Indices created on or after 3.0 will reject this option. +==== Object notation + +Core types no longer support the object notation, which allowed values to be +provided as follows: + +[source,json] +---- +{ + "value": "field_value", + "boost": 42 +} +---- + + [[breaking_30_plugins]] === Plugin changes @@ -515,3 +558,23 @@ from `OsStats.Cpu#getPercent`. Only stored fields are retrievable with this option. The fields option won't be able to load non stored fields from _source anymore. +[[breaking_30_allocation]] +=== Primary shard allocation + +Previously, primary shards were only assigned if a quorum of shard copies were found (configurable using +`index.recovery.initial_shards`, now deprecated). In the case where a primary had only a single replica, quorum was defined +to be a single shard. This meant that any shard copy of an index with replication factor 1 could become primary, even if it +was a stale copy of the data on disk. This is now fixed by using allocation IDs. + +Allocation IDs assign unique identifiers to shard copies. This allows the cluster to differentiate between multiple +copies of the same data and track which shards have been active, so that after a cluster restart, shard copies +containing only the most recent data can become primaries. + +==== `index.shared_filesystem.recover_on_any_node` changes + +The behavior of `index.shared_filesystem.recover_on_any_node = true` has been changed. Previously, in the case where no +shard copies could be found, an arbitrary node was chosen by potentially ignoring allocation deciders. Now, we take +balancing into account but don't assign the shard if the allocation deciders are not satisfied. The behavior has also changed +in the case where shard copies can be found. Previously, a node not holding the shard copy was chosen if none of the nodes +holding shard copies satisfied the allocation deciders. Now, the shard will be assigned to a node having a shard copy, +even if none of the nodes holding a shard copy satisfy the allocation deciders. diff --git a/docs/reference/modules/cluster/shards_allocation.asciidoc b/docs/reference/modules/cluster/shards_allocation.asciidoc index 1daf131106d..b650e237629 100644 --- a/docs/reference/modules/cluster/shards_allocation.asciidoc +++ b/docs/reference/modules/cluster/shards_allocation.asciidoc @@ -22,16 +22,20 @@ Enable or disable allocation for specific kinds of shards: This setting does not affect the recovery of local primary shards when restarting a node. 
A restarted node that has a copy of an unassigned primary -shard will recover that primary immediately, assuming that the -<> setting is -satisfied. +shard will recover that primary immediately, assuming that its allocation id matches +one of the active allocation ids in the cluster state. -- -`cluster.routing.allocation.node_concurrent_recoveries`:: +`cluster.routing.allocation.node_concurrent_incoming_recoveries`:: - How many concurrent shard recoveries are allowed to happen on a node. - Defaults to `2`. + How many concurrent incoming shard recoveries are allowed to happen on a node. Incoming recoveries are the recoveries + where the target shard (most likely the replica unless a shard is relocating) is allocated on the node. Defaults to `2`. + +`cluster.routing.allocation.node_concurrent_outgoing_recoveries`:: + + How many concurrent outgoing shard recoveries are allowed to happen on a node. Outgoing recoveries are the recoveries + where the source shard (most likely the primary unless a shard is relocating) is allocated on the node. Defaults to `2`. `cluster.routing.allocation.node_initial_primaries_recoveries`:: @@ -48,17 +52,6 @@ satisfied. Defaults to `false`, meaning that no check is performed by default. This setting only applies if multiple nodes are started on the same machine. -`indices.recovery.concurrent_streams`:: - - The number of network streams to open per node to recover a shard from - a peer shard. Defaults to `3`. - -`indices.recovery.concurrent_small_file_streams`:: - - The number of streams to open per node for small files (under 5mb) to - recover a shard from a peer shard. Defaults to `2`. - - [float] === Shard Rebalancing Settings diff --git a/docs/reference/modules/indices/recovery.asciidoc b/docs/reference/modules/indices/recovery.asciidoc index cd21f135e38..8de3309347c 100644 --- a/docs/reference/modules/indices/recovery.asciidoc +++ b/docs/reference/modules/indices/recovery.asciidoc @@ -3,12 +3,6 @@ The following _expert_ settings can be set to manage the recovery policy. -`indices.recovery.concurrent_streams`:: - Defaults to `3`. - -`indices.recovery.concurrent_small_file_streams`:: - Defaults to `2`. - `indices.recovery.file_chunk_size`:: Defaults to `512kb`. diff --git a/docs/reference/query-dsl/term-query.asciidoc b/docs/reference/query-dsl/term-query.asciidoc index a34ae5b0a68..85608ca3aa5 100644 --- a/docs/reference/query-dsl/term-query.asciidoc +++ b/docs/reference/query-dsl/term-query.asciidoc @@ -137,7 +137,7 @@ GET my_index/my_type/_search { "query": { "term": { - "exact_value": "foxes" <3> + "full_text": "foxes" <3> } } } diff --git a/docs/reference/search.asciidoc b/docs/reference/search.asciidoc index 2d8a1f8bc9a..da7d2e5ee4b 100644 --- a/docs/reference/search.asciidoc +++ b/docs/reference/search.asciidoc @@ -95,6 +95,8 @@ include::search/validate.asciidoc[] include::search/explain.asciidoc[] +include::search/profile.asciidoc[] + include::search/percolate.asciidoc[] include::search/field-stats.asciidoc[] diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc new file mode 100644 index 00000000000..6e11b4c7650 --- /dev/null +++ b/docs/reference/search/profile.asciidoc @@ -0,0 +1,604 @@ +[[search-profile]] +== Profile API + +experimental[] + +The Profile API provides detailed timing information about the execution of individual components +in a query. 
It gives the user insight into how queries are executed at a low level so that +the user can understand why certain queries are slow, and take steps to improve their slow queries. + +The output from the Profile API is *very* verbose, especially for complicated queries executed across +many shards. Pretty-printing the response is recommended to help understand the output. + +[NOTE] +======================================= +The details provided by the Profile API directly expose Lucene class names and concepts, which means +that complete interpretation of the results requires fairly advanced knowledge of Lucene. This +page attempts to give a crash-course in how Lucene executes queries so that you can use the Profile API to successfully +diagnose and debug queries, but it is only an overview. For complete understanding, please refer +to Lucene's documentation and, in places, the code. + +With that said, a complete understanding is often not required to fix a slow query. It is usually +sufficient to see that a particular component of a query is slow, and not necessarily understand why +the `advance` phase of that query is the cause, for example. +======================================= + +[float] +=== Usage + +Any `_search` request can be profiled by adding a top-level `profile` parameter: + +[source,js] +-------------------------------------------------- +curl -XGET 'localhost:9200/_search' -d '{ + "profile": true,<1> + "query" : { + "match" : { "message" : "search test" } + } +}' +-------------------------------------------------- +<1> Setting the top-level `profile` parameter to `true` will enable profiling +for the search + +This will yield the following result: + +[source,js] +-------------------------------------------------- +{ + "took": 25, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 1, + "hits": [ ... ] <1> + }, + "profile": { + "shards": [ + { + "id": "[htuC6YnSSSmKFq5UBt0YMA][test][0]", + "searches": [ + { + "query": [ + { + "query_type": "BooleanQuery", + "lucene": "message:search message:test", + "time": "15.52889800ms", + "breakdown": { + "score": 0, + "next_doc": 24495, + "match": 0, + "create_weight": 8488388, + "build_scorer": 7016015, + "advance": 0 + }, + "children": [ + { + "query_type": "TermQuery", + "lucene": "message:search", + "time": "4.938855000ms", + "breakdown": { + "score": 0, + "next_doc": 18332, + "match": 0, + "create_weight": 2945570, + "build_scorer": 1974953, + "advance": 0 + } + }, + { + "query_type": "TermQuery", + "lucene": "message:test", + "time": "0.5016660000ms", + "breakdown": { + "score": 0, + "next_doc": 0, + "match": 0, + "create_weight": 170534, + "build_scorer": 331132, + "advance": 0 + } + } + ] + } + ], + "rewrite_time": 185002, + "collector": [ + { + "name": "SimpleTopScoreDocCollector", + "reason": "search_top_hits", + "time": "2.206529000ms" + } + ] + } + ] + } + ] + } +} +-------------------------------------------------- +<1> Search results are returned, but were omitted here for brevity + +Even for a simple query, the response is relatively complicated. Let's break it down piece-by-piece before moving +to more complex examples. + +First, the overall structure of the profile response is as follows: + +[source,js] +-------------------------------------------------- +{ + "profile": { + "shards": [ + { + "id": "[htuC6YnSSSmKFq5UBt0YMA][test][0]", <1> + "searches": [ + { + "query": [...], <2> + "rewrite_time": 185002, <3> + "collector": [...] 
<4> + } + ] + } + ] + } +} +-------------------------------------------------- +<1> A profile is returned for each shard that participated in the response, and is identified +by a unique ID +<2> Each profile contains a section which holds details about the query execution +<3> Each profile has a single time representing the cumulative rewrite time +<4> Each profile also contains a section about the Lucene Collectors which run the search + +Because a search request may be executed against one or more shards in an index, and a search may cover +one or more indices, the top level element in the profile response is an array of `shard` objects. +Each shard object lists its `id` which uniquely identifies the shard. The ID's format is +`[nodeID][indexName][shardID]`. + +The profile itself may consist of one or more "searches", where a search is a query executed against the underlying +Lucene index. Most Search Requests submitted by the user will only execute a single `search` against the Lucene index. +But occasionally multiple searches will be executed, such as including a global aggregation (which needs to execute +a secondary "match_all" query for the global context). + +Inside each `search` object there will be two arrays of profiled information: +a `query` array and a `collector` array. In the future, more sections may be added, such as `suggest`, `highlight`, +`aggregations`, etc. + +There will also be a `rewrite` metric showing the total time spent rewriting the query (in nanoseconds). + +=== `query` Section + +The `query` section contains detailed timing of the query tree executed by Lucene on a particular shard. +The overall structure of this query tree will resemble your original Elasticsearch query, but may be slightly +(or sometimes very) different. It will also use similar but not always identical naming. Using our previous +`term` query example, let's analyze the `query` section: + +[source,js] +-------------------------------------------------- +"query": [ + { + "query_type": "BooleanQuery", + "lucene": "message:search message:test", + "time": "15.52889800ms", + "breakdown": {...}, <1> + "children": [ + { + "query_type": "TermQuery", + "lucene": "message:search", + "time": "4.938855000ms", + "breakdown": {...} + }, + { + "query_type": "TermQuery", + "lucene": "message:test", + "time": "0.5016660000ms", + "breakdown": {...} + } + ] + } +] +-------------------------------------------------- +<1> The breakdown timings are omitted for simplicity + +Based on the profile structure, we can see that our `match` query was rewritten by Lucene into a BooleanQuery with two +clauses (both holding a TermQuery). The `"query_type"` field displays the Lucene class name, and often aligns with +the equivalent name in Elasticsearch. The `"lucene"` field displays the Lucene explanation text for the query, and +is made available to help differentiate between parts of your query (e.g. both `"message:search"` and `"message:test"` +are TermQueries and would appear identical otherwise). + +The `"time"` field shows that this query took ~15ms for the entire BooleanQuery to execute. The recorded time is inclusive +of all children. + +The `"breakdown"` field will give detailed stats about how the time was spent; we'll look at +that in a moment. Finally, the `"children"` array lists any sub-queries that may be present. Because we searched for two +values ("search test"), our BooleanQuery holds two children TermQueries. They have identical information (query_type, time, +breakdown, etc). 
Children are allowed to have their own children. + +==== Timing Breakdown + +The `breakdown` component lists detailed timing statistics about low-level Lucene execution: + +[source,js] +-------------------------------------------------- +"breakdown": { + "score": 0, + "next_doc": 24495, + "match": 0, + "create_weight": 8488388, + "build_scorer": 7016015, + "advance": 0 +} +-------------------------------------------------- + +Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall +`time` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Lucene is +actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time, +the breakdown is inclusive of all children times. + +The meanings of the stats are as follows: + +[float] +=== All parameters: + +[horizontal] +`create_weight`:: + + A Query in Lucene must be capable of reuse across multiple IndexSearchers (think of it as the engine that + executes a search against a specific Lucene Index). This puts Lucene in a tricky spot, since many queries + need to accumulate temporary state/statistics associated with the index it is being used against, but the + Query contract mandates that it must be immutable. + {empty} + + {empty} + + To get around this, Lucene asks each query to generate a Weight object which acts as a temporary context + object to hold state associated with this particular (IndexSearcher, Query) tuple. The `weight` metric + shows how long this process takes. + +`build_scorer`:: + + This parameter shows how long it takes to build a Scorer for the query. A Scorer is the mechanism that + iterates over matching documents and generates a score per document (e.g. how well does "foo" match the document?). + Note that this records the time required to generate the Scorer object, not to actually score the documents. Some + queries have faster or slower initialization of the Scorer, depending on optimizations, complexity, etc. + {empty} + + {empty} + + This may also show timing associated with caching, if enabled and/or applicable for the query. + +`next_doc`:: + + The Lucene method `next_doc` returns the Doc ID of the next document matching the query. This statistic shows + the time it takes to determine which document is the next match, a process that varies considerably depending + on the nature of the query. Next_doc is a specialized form of advance() which is more convenient for many + queries in Lucene. It is equivalent to advance(docId() + 1). + +`advance`:: + + `advance` is the "lower level" version of next_doc: it serves the same purpose of finding the next matching + doc, but requires the calling query to perform extra tasks such as identifying and moving past skips, etc. + However, not all queries can use next_doc, so `advance` is also timed for those queries. + {empty} + + {empty} + + Conjunctions (e.g. `must` clauses in a boolean) are typical consumers of `advance`. + +`matches`:: + + Some queries, such as phrase queries, match documents using a "Two Phase" process. First, the document is + "approximately" matched, and if it matches approximately, it is checked a second time with a more rigorous + (and expensive) process. The second phase verification is what the `matches` statistic measures. + {empty} + + {empty} + + For example, a phrase query first checks a document approximately by ensuring all terms in the phrase are + present in the doc. 
If all the terms are present, it then executes the second phase verification to ensure + the terms are in order to form the phrase, which is relatively more expensive than just checking for presence + of the terms. + {empty} + + {empty} + + Because this two-phase process is only used by a handful of queries, the `matches` statistic will often be zero. + +`score`:: + + This records the time taken to score a particular document via its Scorer. + + +=== `collectors` Section + +The Collectors portion of the response shows high-level execution details. Lucene works by defining a "Collector" +which is responsible for coordinating the traversal, scoring and collection of matching documents. Collectors +are also how a single query can record aggregation results, execute unscoped "global" queries, execute post-query +filters, etc. + +Looking at the previous example: + +[source,js] +-------------------------------------------------- +"collector": [ + { + "name": "SimpleTopScoreDocCollector", + "reason": "search_top_hits", + "time": "2.206529000ms" + } +] +-------------------------------------------------- + +We see a single collector named `SimpleTopScoreDocCollector`. This is the default "scoring and sorting" Collector +used by Elasticsearch. The `"reason"` field attempts to give a plain English description of the class name. The +`"time"` is similar to the time in the Query tree: a wall-clock time inclusive of all children. Similarly, `children` lists +all sub-collectors. + +It should be noted that Collector times are **independent** of the Query times. They are calculated, combined +and normalized independently! Due to the nature of Lucene's execution, it is impossible to "merge" the times +from the Collectors into the Query section, so they are displayed in separate portions. + +For reference, the various collector reasons are: + +[horizontal] +`search_sorted`:: + + A collector that scores and sorts documents. This is the most common collector and will be seen in most + simple searches. + +`search_count`:: + + A collector that only counts the number of documents that match the query, but does not fetch the source. + This is seen when `size: 0` or `search_type=count` is specified. + +`search_terminate_after_count`:: + + A collector that terminates search execution after `n` matching documents have been found. This is seen + when the `terminate_after_count` query parameter has been specified. + +`search_min_score`:: + + A collector that only returns matching documents that have a score greater than `n`. This is seen when + the top-level parameter `min_score` has been specified. + +`search_multi`:: + + A collector that wraps several other collectors. This is seen when combinations of search, aggregations, + global aggs and post_filters are combined in a single search. + +`search_timeout`:: + + A collector that halts execution after a specified period of time. This is seen when a `timeout` top-level + parameter has been specified. + +`aggregation`:: + + A collector that Elasticsearch uses to run aggregations against the query scope. A single `aggregation` + collector is used to collect documents for *all* aggregations, so you will see a list of aggregations + in the name instead. + +`global_aggregation`:: + + A collector that executes an aggregation against the global query scope, rather than the specified query. 
+ Because the global scope is necessarily different from the executed query, it must execute its own + match_all query (which you will see added to the Query section) to collect your entire dataset. + + + +=== `rewrite` Section + +All queries in Lucene undergo a "rewriting" process. A query (and its sub-queries) may be rewritten one or +more times, and the process continues until the query stops changing. This process allows Lucene to perform +optimizations, such as removing redundant clauses, replacing one query with a more efficient one, +etc. For example, a Boolean -> Boolean -> TermQuery can be rewritten to a TermQuery, because all the Booleans +are unnecessary in this case. + +The rewriting process is complex and difficult to display, since queries can change drastically. Rather than +showing the intermediate results, the total rewrite time is simply displayed as a value (in nanoseconds). This +value is cumulative and contains the total time for all queries being rewritten. + +=== A more complex example + +To demonstrate a slightly more complex query and the associated results, we can profile the following query: + +[source,js] +-------------------------------------------------- +GET /test/_search +{ + "profile": true, + "query": { + "term": { + "message": { + "value": "search" + } + } + }, + "aggs": { + "non_global_term": { + "terms": { + "field": "agg" + }, + "aggs": { + "second_term": { + "terms": { + "field": "sub_agg" + } + } + } + }, + "another_agg": { + "cardinality": { + "field": "aggB" + } + }, + "global_agg": { + "global": {}, + "aggs": { + "my_agg2": { + "terms": { + "field": "globalAgg" + } + } + } + } + }, + "post_filter": { + "term": { + "my_field": "foo" + } + } +} +-------------------------------------------------- + +This example has: + +- A query +- A scoped aggregation +- A global aggregation +- A post_filter + +And the response: + + +[source,js] +-------------------------------------------------- +{ + "profile": { + "shards": [ + { + "id": "[P6-vulHtQRWuD4YnubWb7A][test][0]", + "searches": [ + { + "query": [ + { + "query_type": "TermQuery", + "lucene": "my_field:foo", + "time": "0.4094560000ms", + "breakdown": { + "score": 0, + "next_doc": 0, + "match": 0, + "create_weight": 31584, + "build_scorer": 377872, + "advance": 0 + } + }, + { + "query_type": "TermQuery", + "lucene": "message:search", + "time": "0.3037020000ms", + "breakdown": { + "score": 0, + "next_doc": 5936, + "match": 0, + "create_weight": 185215, + "build_scorer": 112551, + "advance": 0 + } + } + ], + "rewrite_time": 7208, + "collector": [ + { + "name": "MultiCollector", + "reason": "search_multi", + "time": "1.378943000ms", + "children": [ + { + "name": "FilteredCollector", + "reason": "search_post_filter", + "time": "0.4036590000ms", + "children": [ + { + "name": "SimpleTopScoreDocCollector", + "reason": "search_top_hits", + "time": "0.006391000000ms" + } + ] + }, + { + "name": "BucketCollector: [[non_global_term, another_agg]]", + "reason": "aggregation", + "time": "0.9546020000ms" + } + ] + } + ] + }, + { + "query": [ + { + "query_type": "MatchAllDocsQuery", + "lucene": "*:*", + "time": "0.04829300000ms", + "breakdown": { + "score": 0, + "next_doc": 3672, + "match": 0, + "create_weight": 6311, + "build_scorer": 38310, + "advance": 0 + } + } + ], + "rewrite_time": 1067, + "collector": [ + { + "name": "GlobalAggregator: [global_agg]", + "reason": "aggregation_global", + "time": "0.1226310000ms" + } + ] + } + ] + } + ] + } +} +-------------------------------------------------- + +As you can 
see, the output is significantly more verbose than before. All the major portions of the query are +represented: + +1. The first `TermQuery` (message:search) represents the main `term` query +2. The second `TermQuery` (my_field:foo) represents the `post_filter` query +3. There is a `MatchAllDocsQuery` (\*:*) query which is being executed as a second, distinct search. This was +not part of the query specified by the user, but is auto-generated by the global aggregation to provide a global query scope. + +The Collector tree is fairly straightforward, showing how a single MultiCollector wraps a FilteredCollector +to execute the post_filter (which in turn wraps the normal scoring SimpleCollector), and a BucketCollector to run +all scoped aggregations. In the MatchAll search, there is a single GlobalAggregator to run the global aggregation. + +=== Performance Notes + +Like any profiler, the Profile API introduces a non-negligible overhead to query execution. The act of instrumenting +low-level method calls such as `advance` and `next_doc` can be fairly expensive, since these methods are called +in tight loops. Therefore, profiling should not be enabled in production settings by default, and should not +be compared against non-profiled query times. Profiling is just a diagnostic tool. + +There are also cases where special Lucene optimizations are disabled, since they are not amenable to profiling. This +could cause some queries to report larger relative times than their non-profiled counterparts, but in general should +not have a drastic effect compared to other components in the profiled query. + +=== Limitations + +- Profiling statistics are currently not available for suggestions, highlighting, `dfs_query_then_fetch` +- Detailed breakdown for aggregations is not currently available past the high-level overview provided +from the Collectors +- The Profiler is still highly experimental. The Profiler is instrumenting parts of Lucene that were +never designed to be exposed in this manner, and so all results should be viewed as a best effort to provide detailed +diagnostics. We hope to improve this over time. If you find obviously wrong numbers, strange query structures or +other bugs, please report them! + +=== Understanding MultiTermQuery output + +A special note needs to be made about the `MultiTermQuery` class of queries. This includes wildcards, regex and fuzzy +queries. These queries emit very verbose responses, and are not overly structured. + +Essentially, these queries rewrite themselves on a per-segment basis. If you imagine the wildcard query `b*`, it technically +can match any token that begins with the letter "b". It would be impossible to enumerate all possible combinations, +so Lucene rewrites the query in the context of the segment being evaluated. E.g. one segment may contain the tokens +`[bar, baz]`, so the query rewrites to a BooleanQuery combination of "bar" and "baz". Another segment may only have the +token `[bakery]`, so the query rewrites to a single TermQuery for "bakery". + +Due to this dynamic, per-segment rewriting, the clean tree structure becomes distorted and no longer follows a clean +"lineage" showing how one query rewrites into the next. At present, all we can do is apologize, and suggest you +collapse the details for that query's children if it is too confusing. 
Luckily, all the timing statistics are correct, +just not the physical layout in the response, so it is sufficient to just analyze the top-level MultiTermQuery and +ignore its children if you find the details too tricky to interpret. + +Hopefully this will be fixed in future iterations, but it is a tricky problem to solve and still in progress :) \ No newline at end of file diff --git a/docs/reference/search/request/scroll.asciidoc b/docs/reference/search/request/scroll.asciidoc index 825564d799d..e18593d21cc 100644 --- a/docs/reference/search/request/scroll.asciidoc +++ b/docs/reference/search/request/scroll.asciidoc @@ -98,7 +98,7 @@ curl -XGET 'localhost:9200/_search?scroll=1m' -d ' { "sort": [ "_doc" - } + ] } ' -------------------------------------------------- diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index 8d0b6708979..14ab207c301 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -238,7 +238,7 @@ Format in `lat,lon`. "sort" : [ { "_geo_distance" : { - "pin.location" : "-70,40", + "pin.location" : "40,-70", "order" : "asc", "unit" : "km" } @@ -301,7 +301,7 @@ Multiple geo points can be passed as an array containing any `geo_point` format, [source,js] -------------------------------------------------- "pin.location" : [[-70, 40], [-71, 42]] -"pin.location" : [{"lat": -70, "lon": 40}, {"lat": -71, "lon": 42}] +"pin.location" : [{"lat": 40, "lon": -70}, {"lat": 42, "lon": -71}] -------------------------------------------------- and so forth. diff --git a/docs/reference/search/search-template.asciidoc b/docs/reference/search/search-template.asciidoc index 77670acafb1..76262644208 100644 --- a/docs/reference/search/search-template.asciidoc +++ b/docs/reference/search/search-template.asciidoc @@ -365,7 +365,7 @@ Pre-registered templates can also be rendered using GET /_render/template/ { "params": { - "... + "..." } } ------------------------------------------ diff --git a/docs/reference/search/suggesters/phrase-suggest.asciidoc b/docs/reference/search/suggesters/phrase-suggest.asciidoc index bc2f016d288..7ba1c93540b 100644 --- a/docs/reference/search/suggesters/phrase-suggest.asciidoc +++ b/docs/reference/search/suggesters/phrase-suggest.asciidoc @@ -97,20 +97,20 @@ can contain misspellings (See parameter descriptions below). language model, the suggester will use this field to gain statistics to score corrections. This field is mandatory. -`gram_size`:: +`gram_size`:: sets max size of the n-grams (shingles) in the `field`. If the field doesn't contain n-grams (shingles) this should be omitted or set to `1`. Note that Elasticsearch tries to detect the gram size based on the specified `field`. If the field uses a `shingle` filter the `gram_size` is set to the `max_shingle_size` if not explicitly set. -`real_word_error_likelihood`:: +`real_word_error_likelihood`:: the likelihood of a term being a misspelled even if the term exists in the dictionary. The default is `0.95` corresponding to 5% of the real words are misspelled. -`confidence`:: +`confidence`:: The confidence level defines a factor applied to the input phrases score which is used as a threshold for other suggest candidates. Only candidates that score higher than the threshold will be @@ -118,7 +118,7 @@ can contain misspellings (See parameter descriptions below). only return suggestions that score higher than the input phrase. If set to `0.0` the top N candidates are returned. The default is `1.0`. 
-`max_errors`:: +`max_errors`:: the maximum percentage of the terms that at most considered to be misspellings in order to form a correction. This method accepts a float value in the range `[0..1)` as a fraction of the actual @@ -126,39 +126,39 @@ can contain misspellings (See parameter descriptions below). default is set to `1.0` which corresponds to that only corrections with at most 1 misspelled term are returned. Note that setting this too high can negatively impact performance. Low values like `1` or `2` are recommended - otherwise the time spend in suggest calls might exceed the time spend in + otherwise the time spent in suggest calls might exceed the time spent in query execution. -`separator`:: +`separator`:: the separator that is used to separate terms in the bigram field. If not set the whitespace character is used as a separator. -`size`:: +`size`:: the number of candidates that are generated for each individual query term Low numbers like `3` or `5` typically produce good results. Raising this can bring up terms with higher edit distances. The default is `5`. -`analyzer`:: +`analyzer`:: Sets the analyzer to analyse to suggest text with. Defaults to the search analyzer of the suggest field passed via `field`. -`shard_size`:: +`shard_size`:: Sets the maximum number of suggested term to be retrieved from each individual shard. During the reduce phase, only the top N suggestions are returned based on the `size` option. Defaults to `5`. -`text`:: +`text`:: Sets the text / query to provide suggestions for. `highlight`:: - Sets up suggestion highlighting. If not provided then - no `highlighted` field is returned. If provided must - contain exactly `pre_tag` and `post_tag` which are - wrapped around the changed tokens. If multiple tokens - in a row are changed the entire phrase of changed tokens + Sets up suggestion highlighting. If not provided then + no `highlighted` field is returned. If provided must + contain exactly `pre_tag` and `post_tag` which are + wrapped around the changed tokens. If multiple tokens + in a row are changed the entire phrase of changed tokens is wrapped rather than each token. `collate`:: @@ -192,8 +192,10 @@ curl -XPOST 'localhost:9200/_search' -d { } ], "collate": { "query": { <1> - "match": { - "{{field_name}}" : "{{suggestion}}" <2> + "inline" : { + "match": { + "{{field_name}}" : "{{suggestion}}" <2> + } } }, "params": {"field_name" : "title"}, <3> @@ -217,21 +219,21 @@ curl -XPOST 'localhost:9200/_search' -d { The `phrase` suggester supports multiple smoothing models to balance weight between infrequent grams (grams (shingles) are not existing in -the index) and frequent grams (appear at least once in the index). +the index) and frequent grams (appear at least once in the index). [horizontal] -`stupid_backoff`:: +`stupid_backoff`:: a simple backoff model that backs off to lower order n-gram models if the higher order count is `0` and discounts the lower order n-gram model by a constant factor. The default `discount` is - `0.4`. Stupid Backoff is the default model. + `0.4`. Stupid Backoff is the default model. `laplace`:: a smoothing model that uses an additive smoothing where a constant (typically `1.0` or smaller) is added to all counts to balance - weights, The default `alpha` is `0.5`. + weights. The default `alpha` is `0.5`. -`linear_interpolation`:: +`linear_interpolation`:: a smoothing model that takes the weighted mean of the unigrams, bigrams and trigrams based on user supplied weights (lambdas). 
Linear Interpolation doesn't have any default values. @@ -244,7 +246,7 @@ The `phrase` suggester uses candidate generators to produce a list of possible terms per term in the given text. A single candidate generator is similar to a `term` suggester called for each individual term in the text. The output of the generators is subsequently scored in combination -with the candidates from the other terms to for suggestion candidates. +with the candidates from the other terms to form suggestion candidates. Currently only one type of candidate generator is supported, the `direct_generator`. The Phrase suggest API accepts a list of generators @@ -256,26 +258,30 @@ called per term in the original text. The direct generators support the following parameters: [horizontal] -`field`:: +`field`:: The field to fetch the candidate suggestions from. This is a required option that either needs to be set globally or per suggestion. -`size`:: +`size`:: The maximum corrections to be returned per suggest text token. `suggest_mode`:: - The suggest mode controls what suggestions are - included or controls for what suggest text terms, suggestions should be - suggested. Three possible values can be specified: - ** `missing`: Only suggest terms in the suggest text that aren't in the - index. This is the default. - ** `popular`: Only suggest suggestions that occur in more docs then the - original suggest text term. + The suggest mode controls what suggestions are included in the suggestions + generated on each shard. All values other than `always` can be thought of + as an optimization to generate fewer suggestions to test on each shard and + are not rechecked when combining the suggestions generated on each + shard. Thus `missing` will generate suggestions for terms on shards that do + not contain them even if other shards do contain them. Those should be + filtered out using `confidence`. Three possible values can be specified: + ** `missing`: Only generate suggestions for terms that are not in the + shard. This is the default. + ** `popular`: Only suggest terms that occur in more docs on the shard than + the original term. ** `always`: Suggest any matching suggestions based on terms in the suggest text. -`max_edits`:: +`max_edits`:: The maximum edit distance candidate suggestions can have in order to be considered as a suggestion. Can only be a value between 1 and 2. Any other value result in an bad request error being thrown. @@ -287,11 +293,11 @@ The direct generators support the following parameters: this number improves spellcheck performance. Usually misspellings don't occur in the beginning of terms. (Old name "prefix_len" is deprecated) -`min_word_length`:: +`min_word_length`:: The minimum length a suggest text term must have in order to be included. Defaults to 4. (Old name "min_word_len" is deprecated) -`max_inspections`:: +`max_inspections`:: A factor that is used to multiply with the `shards_size` in order to inspect more candidate spell corrections on the shard level. Can improve accuracy at the cost of performance. @@ -306,7 +312,7 @@ The direct generators support the following parameters: cannot be fractional. The shard level document frequencies are used for this option. -`max_term_freq`:: +`max_term_freq`:: The maximum threshold in number of documents a suggest text token can exist in order to be included. 
@@ -322,16 +328,16 @@ The direct generators support the following parameters: tokens passed to this candidate generator. This filter is applied to the original token before candidates are generated. -`post_filter`:: +`post_filter`:: a filter (analyzer) that is applied to each of the generated tokens before they are passed to the actual phrase scorer. The following example shows a `phrase` suggest call with two generators: the first one uses a field containing ordinary indexed terms and the -second one uses a field that uses terms indexed with a `reverse` filter -(tokens are index in reverse order). This is used to overcome the limitation -of the direct generators to require a constant prefix to provide -high-performance suggestions. The `pre_filter` and `post_filter` options +second one uses a field whose terms are indexed with a `reverse` filter +(tokens are indexed in reverse order). This is used to overcome the limitation +of the direct generators, which require a constant prefix to provide +high-performance suggestions. The `pre_filter` and `post_filter` options accept ordinary analyzer names. [source,js] diff --git a/docs/reference/tasks/list.asciidoc b/docs/reference/tasks/list.asciidoc new file mode 100644 index 00000000000..bfd7f12c43f --- /dev/null +++ b/docs/reference/tasks/list.asciidoc @@ -0,0 +1,46 @@ +[[tasks-list]] +== Tasks List + +The task management API allows you to retrieve information about currently running tasks. + +[source,js] -------------------------------------------------- +curl -XGET 'http://localhost:9200/_tasks' +curl -XGET 'http://localhost:9200/_tasks/nodeId1,nodeId2' +curl -XGET 'http://localhost:9200/_tasks/nodeId1,nodeId2/cluster:*' -------------------------------------------------- + +The first command retrieves all tasks currently running on all nodes. +The second command selectively retrieves tasks from nodes +`nodeId1` and `nodeId2`. All the node selection options are explained +<>. +The third command retrieves all cluster-related tasks running on nodes `nodeId1` and `nodeId2`.
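+Node selectors and action filters can be combined freely. As a further illustrative sketch (the `_all` node selector and the `indices:*` action pattern are assumptions that follow the `{nodes}/{actions}` URL pattern shown above), the following would list index-related tasks on all nodes:
+
+[source,js]
+--------------------------------------------------
+curl -XGET 'http://localhost:9200/_tasks/_all/indices:*'
+--------------------------------------------------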
+ +The result will look similar to: + +[source,js] +-------------------------------------------------- +{ + "nodes" : { + "fDlEl7PrQi6F-awHZ3aaDw" : { + "name" : "Gazer", + "transport_address" : "127.0.0.1:9300", + "host" : "127.0.0.1", + "ip" : "127.0.0.1:9300", + "tasks" : [ { + "node" : "fDlEl7PrQi6F-awHZ3aaDw", + "id" : 105, + "type" : "transport", + "action" : "cluster:monitor/nodes/tasks" + }, { + "node" : "fDlEl7PrQi6F-awHZ3aaDw", + "id" : 106, + "type" : "direct", + "action" : "cluster:monitor/nodes/tasks[n]", + "parent_node" : "fDlEl7PrQi6F-awHZ3aaDw", + "parent_id" : 105 + } ] + } + } +} +-------------------------------------------------- diff --git a/docs/reference/testing/testing-framework.asciidoc b/docs/reference/testing/testing-framework.asciidoc index 9c0e5f4f10d..e0b27733441 100644 --- a/docs/reference/testing/testing-framework.asciidoc +++ b/docs/reference/testing/testing-framework.asciidoc @@ -116,7 +116,7 @@ public class Mytests extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) .put("node.mode", "network") .build(); } diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index 9f62e34687d..9e3943a32b2 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -27,10 +27,12 @@ dependencies { compile 'org.antlr:antlr4-runtime:4.5.1-1' compile 'org.ow2.asm:asm:5.0.4' compile 'org.ow2.asm:asm-commons:5.0.4' + compile 'org.ow2.asm:asm-tree:5.0.4' } dependencyLicenses { mapping from: /lucene-.*/, to: 'lucene' + mapping from: /asm-.*/, to: 'asm' } compileJava.options.compilerArgs << '-Xlint:-rawtypes' diff --git a/modules/lang-expression/licenses/asm-commons-LICENSE.txt b/modules/lang-expression/licenses/asm-commons-LICENSE.txt deleted file mode 100644 index afb064f2f26..00000000000 --- a/modules/lang-expression/licenses/asm-commons-LICENSE.txt +++ /dev/null @@ -1,26 +0,0 @@ -Copyright (c) 2012 France Télécom -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. -3. Neither the name of the copyright holders nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -THE POSSIBILITY OF SUCH DAMAGE. diff --git a/modules/lang-expression/licenses/asm-commons-NOTICE.txt b/modules/lang-expression/licenses/asm-commons-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/modules/lang-expression/licenses/asm-commons-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 new file mode 100644 index 00000000000..5822a485a61 --- /dev/null +++ b/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 @@ -0,0 +1 @@ +396ce0c07ba2b481f25a70195c7c94922f0d1b0b \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1719088.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 50bb58f443d..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -787356d4ae6142bb8ca7e9713d0a281a797b57fb \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..a5332a9ca09 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +60e056d2dd04a81440482b047af0737bc41593d9 \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java index 0fa83d92d76..c50aa4da289 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java @@ -25,7 +25,6 @@ import org.apache.lucene.queries.function.ValueSource; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.search.MultiValueMode; import java.io.IOException; import java.util.Map; @@ -68,6 +67,6 @@ public class CountMethodValueSource extends ValueSource { @Override public String description() { - return "count: field(" + fieldData.getFieldNames().toString() + ")"; + return "count: field(" + fieldData.getFieldName() + ")"; } } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java index f7198fc0ae2..3ed2ed1f0b5 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java @@ -19,14 
+19,14 @@ package org.elasticsearch.script.expression; -import java.util.Calendar; -import java.util.Locale; -import java.util.TimeZone; - import org.apache.lucene.queries.function.ValueSource; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.search.MultiValueMode; +import java.util.Calendar; +import java.util.Locale; +import java.util.TimeZone; + class DateMethodFunctionValues extends FieldDataFunctionValues { private final int calendarType; private final Calendar calendar; diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java index 522b546656d..9efeed54ff9 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java @@ -19,18 +19,17 @@ package org.elasticsearch.script.expression; -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; - import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.search.MultiValueMode; +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + class DateMethodValueSource extends FieldDataValueSource { protected final String methodName; @@ -55,7 +54,7 @@ class DateMethodValueSource extends FieldDataValueSource { @Override public String description() { - return methodName + ": field(" + fieldData.getFieldNames().toString() + ")"; + return methodName + ": field(" + fieldData.getFieldName() + ")"; } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java index a7f93925119..192f69884e8 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java @@ -95,7 +95,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { // classloader created here final SecurityManager sm = System.getSecurityManager(); if (sm != null) { @@ -184,7 +184,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements throw new ScriptException("Variable [" + variable + "] does not follow an allowed format of either doc['field'] or doc['field'].method()"); } - MappedFieldType fieldType = mapper.smartNameFieldType(fieldname); + MappedFieldType fieldType = mapper.fullName(fieldname); if (fieldType == null) { throw new ScriptException("Field [" + fieldname + "] used in expression does not exist in mappings"); diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java index 39386ee4913..708cd0af152 100644 --- 
a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java @@ -75,6 +75,6 @@ class FieldDataValueSource extends ValueSource { @Override public String description() { - return "field(" + fieldData.getFieldNames().toString() + ")"; + return "field(" + fieldData.getFieldName() + ")"; } } diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java index b05b9630a14..198558381d3 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java @@ -33,23 +33,23 @@ public class ExpressionTests extends ESSingleNodeTestCase { public void testNeedsScores() { IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double"); - + ExpressionScriptEngineService service = new ExpressionScriptEngineService(Settings.EMPTY); SearchLookup lookup = new SearchLookup(index.mapperService(), index.fieldData(), null); - Object compiled = service.compile("1.2"); + Object compiled = service.compile("1.2", Collections.emptyMap()); SearchScript ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.emptyMap()); assertFalse(ss.needsScores()); - compiled = service.compile("doc['d'].value"); + compiled = service.compile("doc['d'].value", Collections.emptyMap()); ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.emptyMap()); assertFalse(ss.needsScores()); - compiled = service.compile("1/_score"); + compiled = service.compile("1/_score", Collections.emptyMap()); ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.emptyMap()); assertTrue(ss.needsScores()); - compiled = service.compile("doc['d'].value * _score"); + compiled = service.compile("doc['d'].value * _score", Collections.emptyMap()); ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.emptyMap()); assertTrue(ss.needsScores()); } diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index 89a5be7ff1c..b4c0106abbe 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -21,15 +21,12 @@ package org.elasticsearch.script.expression; import org.apache.lucene.expressions.Expression; import org.apache.lucene.expressions.js.JavascriptCompiler; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.common.lucene.search.function.CombineFunction; -import org.elasticsearch.common.xcontent.XContentBuilder; -import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; @@ -97,7 +94,7 @@ public class MoreExpressionTests extends ESIntegTestCase { assertEquals(1, rsp.getHits().getTotalHits()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); } - + public void testFunction() throws Exception { createIndex("test"); ensureGreen("test"); diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 341dcbf0d6c..7db4eab4403 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -23,7 +23,7 @@ esplugin { } dependencies { - compile 'org.codehaus.groovy:groovy-all:2.4.4:indy' + compile 'org.codehaus.groovy:groovy:2.4.4:indy' } compileJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked,-cast,-deprecation' @@ -35,3 +35,34 @@ integTest { systemProperty 'es.script.indexed', 'on' } } + +thirdPartyAudit.excludes = [ + // classes are missing, we bring in a minimal groovy dist + // for example we do not need ivy, scripts arent allowed to download code + 'com.thoughtworks.xstream.XStream', + 'groovyjarjarasm.asm.util.Textifiable', + 'org.apache.ivy.Ivy', + 'org.apache.ivy.core.event.IvyListener', + 'org.apache.ivy.core.event.download.PrepareDownloadEvent', + 'org.apache.ivy.core.event.resolve.StartResolveEvent', + 'org.apache.ivy.core.module.descriptor.Configuration', + 'org.apache.ivy.core.module.descriptor.DefaultDependencyArtifactDescriptor', + 'org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor', + 'org.apache.ivy.core.module.descriptor.DefaultExcludeRule', + 'org.apache.ivy.core.module.descriptor.DefaultModuleDescriptor', + 'org.apache.ivy.core.module.id.ArtifactId', + 'org.apache.ivy.core.module.id.ModuleId', + 'org.apache.ivy.core.module.id.ModuleRevisionId', + 'org.apache.ivy.core.report.ResolveReport', + 'org.apache.ivy.core.resolve.ResolveOptions', + 'org.apache.ivy.core.settings.IvySettings', + 'org.apache.ivy.plugins.matcher.ExactPatternMatcher', + 'org.apache.ivy.plugins.matcher.PatternMatcher', + 'org.apache.ivy.plugins.resolver.IBiblioResolver', + 'org.apache.ivy.util.DefaultMessageLogger', + 'org.apache.ivy.util.Message', + 'org.fusesource.jansi.Ansi$Attribute', + 'org.fusesource.jansi.Ansi$Color', + 'org.fusesource.jansi.Ansi', + 'org.fusesource.jansi.AnsiRenderWriter', +] diff --git a/modules/lang-groovy/licenses/groovy-2.4.4-indy.jar.sha1 b/modules/lang-groovy/licenses/groovy-2.4.4-indy.jar.sha1 new file mode 100644 index 00000000000..30b996327b4 --- /dev/null +++ b/modules/lang-groovy/licenses/groovy-2.4.4-indy.jar.sha1 @@ -0,0 +1 @@ +139af316ac35534120c53f05393ce46d60d6da48 \ No newline at end of file diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE.txt b/modules/lang-groovy/licenses/groovy-LICENSE.txt similarity index 100% rename from modules/lang-groovy/licenses/groovy-all-LICENSE.txt rename to modules/lang-groovy/licenses/groovy-LICENSE.txt diff --git a/modules/lang-groovy/licenses/groovy-all-NOTICE.txt b/modules/lang-groovy/licenses/groovy-NOTICE.txt similarity index 100% rename from modules/lang-groovy/licenses/groovy-all-NOTICE.txt rename to modules/lang-groovy/licenses/groovy-NOTICE.txt diff --git a/modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 b/modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 deleted file mode 100644 index 458716cefdf..00000000000 --- 
a/modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -574a15e35eba5f986a0564ae197c78e843ece954 diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt deleted file mode 100644 index d62cc1ab2d7..00000000000 --- a/modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt +++ /dev/null @@ -1,31 +0,0 @@ - -SOFTWARE RIGHTS - -ANTLR 1989-2006 Developed by Terence Parr -Partially supported by University of San Francisco & jGuru.com - -We reserve no legal rights to the ANTLR--it is fully in the -public domain. An individual or company may do whatever -they wish with source code distributed with ANTLR or the -code generated by ANTLR, including the incorporation of -ANTLR, or its output, into commerical software. - -We encourage users to develop software with ANTLR. However, -we do ask that credit is given to us for developing -ANTLR. By "credit", we mean that if you use ANTLR or -incorporate any source code into one of your programs -(commercial product, research project, or otherwise) that -you acknowledge this fact somewhere in the documentation, -research report, etc... If you like ANTLR and have -developed a nice tool with the output, please mention that -you developed it using ANTLR. In addition, we ask that the -headers remain intact in our source code. As long as these -guidelines are kept, we expect to continue enhancing this -system and expect to make other tools available as they are -completed. - -The primary ANTLR guy: - -Terence Parr -parrt@cs.usfca.edu -parrt@antlr.org diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt deleted file mode 100644 index ae898f75545..00000000000 --- a/modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt +++ /dev/null @@ -1,31 +0,0 @@ -/*** - * http://asm.objectweb.org/ - * - * ASM: a very small and fast Java bytecode manipulation framework - * Copyright (c) 2000-2005 INRIA, France Telecom - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holders nor the names of its - * contributors may be used to endorse or promote products derived from - * this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF - * THE POSSIBILITY OF SUCH DAMAGE. - */ diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt deleted file mode 100644 index b750c0f5f7b..00000000000 --- a/modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt +++ /dev/null @@ -1,30 +0,0 @@ -The following notice applies to the files: - -src/main/org/codehaus/groovy/jsr223/GroovyCompiledScript.java -src/main/org/codehaus/groovy/jsr223/GroovyScriptEngineFactory.java -src/main/org/codehaus/groovy/jsr223/GroovyScriptEngineImpl.java - - -/* - * Copyright 2006 Sun Microsystems, Inc. All rights reserved. - * Use is subject to license terms. - * - * Redistribution and use in source and binary forms, with or without modification, are - * permitted provided that the following conditions are met: Redistributions of source code - * must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, this list of - * conditions and the following disclaimer in the documentation and/or other materials - * provided with the distribution. Neither the name of the Sun Microsystems nor the names of - * is contributors may be used to endorse or promote products derived from this software - * without specific prior written permission. - - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS - * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY - * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER - * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON - * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE - * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 85f57694ce6..98ed5695973 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -44,7 +44,14 @@ import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.script.*; +import org.elasticsearch.script.ClassPermission; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.ScoreAccessor; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.ScriptException; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; @@ -165,7 +172,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { try { // we reuse classloader, so do a security check just in case. SecurityManager sm = System.getSecurityManager(); diff --git a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy index e1fd920d119..4ada1ad5f38 100644 --- a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy @@ -34,7 +34,6 @@ grant { permission org.elasticsearch.script.ClassPermission "<>"; // groovy runtime (TODO: clean these up if possible) permission org.elasticsearch.script.ClassPermission "groovy.grape.GrabAnnotationTransformation"; - permission org.elasticsearch.script.ClassPermission "groovy.json.JsonOutput"; permission org.elasticsearch.script.ClassPermission "groovy.lang.Binding"; permission org.elasticsearch.script.ClassPermission "groovy.lang.GroovyObject"; permission org.elasticsearch.script.ClassPermission "groovy.lang.GString"; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java index 2883b74cc1d..a1faea0b5e5 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java @@ -45,12 +45,14 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.having; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static 
org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.SuiteScopeTestCase public class BucketSelectorTests extends ESIntegTestCase { @@ -74,6 +76,7 @@ public class BucketSelectorTests extends ESIntegTestCase { public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); + createIndex("idx_with_gaps"); interval = randomIntBetween(1, 50); numDocs = randomIntBetween(10, 500); @@ -84,6 +87,10 @@ public class BucketSelectorTests extends ESIntegTestCase { for (int docs = 0; docs < numDocs; docs++) { builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder())); } + builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(1, 1, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(1, 2, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 1, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 3, 0, 0))); client().preparePutIndexedScript().setId("my_script").setScriptLang(GroovyScriptEngineService.NAME) .setSource("{ \"script\": \"Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)\" }").get(); @@ -93,12 +100,17 @@ public class BucketSelectorTests extends ESIntegTestCase { } private XContentBuilder newDocBuilder() throws IOException { + return newDocBuilder(randomIntBetween(minNumber, maxNumber), randomIntBetween(minNumber, maxNumber), + randomIntBetween(minNumber, maxNumber), randomIntBetween(minNumber, maxNumber)); + } + + private XContentBuilder newDocBuilder(int field1Value, int field2Value, int field3Value, int field4Value) throws IOException { XContentBuilder jsonBuilder = jsonBuilder(); jsonBuilder.startObject(); - jsonBuilder.field(FIELD_1_NAME, randomIntBetween(minNumber, maxNumber)); - jsonBuilder.field(FIELD_2_NAME, randomIntBetween(minNumber, maxNumber)); - jsonBuilder.field(FIELD_3_NAME, randomIntBetween(minNumber, maxNumber)); - jsonBuilder.field(FIELD_4_NAME, randomIntBetween(minNumber, maxNumber)); + jsonBuilder.field(FIELD_1_NAME, field1Value); + jsonBuilder.field(FIELD_2_NAME, field2Value); + jsonBuilder.field(FIELD_3_NAME, field3Value); + jsonBuilder.field(FIELD_4_NAME, field4Value); jsonBuilder.endObject(); return jsonBuilder; } @@ -451,4 +463,70 @@ public class BucketSelectorTests extends ESIntegTestCase { assertThat(field2SumValue + field3SumValue, greaterThan(100.0)); } } + + public void testEmptyBuckets() { + SearchResponse response = client().prepareSearch("idx_with_gaps") + .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(1) + .subAggregation(histogram("inner_histo").field(FIELD_1_NAME).interval(1).extendedBounds(1l, 4l).minDocCount(0) + .subAggregation(derivative("derivative").setBucketsPaths("_count").gapPolicy(GapPolicy.INSERT_ZEROS)))) + .execute().actionGet(); + + assertSearchResponse(response); + + InternalHistogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + Histogram.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo("1")); + Histogram innerHisto = bucket.getAggregations().get("inner_histo"); + assertThat(innerHisto, notNullValue()); + List innerBuckets = innerHisto.getBuckets(); + 
assertThat(innerBuckets, notNullValue()); + assertThat(innerBuckets.size(), equalTo(4)); + for (int i = 0; i < innerBuckets.size(); i++) { + Histogram.Bucket innerBucket = innerBuckets.get(i); + if (i == 0) { + assertThat(innerBucket.getAggregations().get("derivative"), nullValue()); + } else { + assertThat(innerBucket.getAggregations().get("derivative"), notNullValue()); + } + } + + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo("2")); + innerHisto = bucket.getAggregations().get("inner_histo"); + assertThat(innerHisto, notNullValue()); + innerBuckets = innerHisto.getBuckets(); + assertThat(innerBuckets, notNullValue()); + assertThat(innerBuckets.size(), equalTo(4)); + for (int i = 0; i < innerBuckets.size(); i++) { + Histogram.Bucket innerBucket = innerBuckets.get(i); + if (i == 0) { + assertThat(innerBucket.getAggregations().get("derivative"), nullValue()); + } else { + assertThat(innerBucket.getAggregations().get("derivative"), notNullValue()); + } + } + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo("3")); + innerHisto = bucket.getAggregations().get("inner_histo"); + assertThat(innerHisto, notNullValue()); + innerBuckets = innerHisto.getBuckets(); + assertThat(innerBuckets, notNullValue()); + assertThat(innerBuckets.size(), equalTo(4)); + for (int i = 0; i < innerBuckets.size(); i++) { + Histogram.Bucket innerBucket = innerBuckets.get(i); + if (i == 0) { + assertThat(innerBucket.getAggregations().get("derivative"), nullValue()); + } else { + assertThat(innerBucket.getAggregations().get("derivative"), notNullValue()); + } + } + } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java index d893b2767ca..f05938b4c98 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java @@ -19,13 +19,11 @@ package org.elasticsearch.messy.tests; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; @@ -34,10 +32,8 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; @@ -46,7 +42,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.test.ESIntegTestCase; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -54,7 +49,6 @@ 
import java.util.concurrent.CyclicBarrier; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; @@ -555,86 +549,6 @@ public class BulkTests extends ESIntegTestCase { assertThat(successes, equalTo(1)); } - // issue 4745 - public void testPreParsingSourceDueToMappingShouldNotBreakCompleteBulkRequest() throws Exception { - XContentBuilder builder = jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("enabled", true) - .field("path", "last_modified") - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate("test").addMapping("type", builder) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)); - - String brokenBuildRequestData = "{\"index\": {\"_id\": \"1\"}}\n" + - "{\"name\": \"Malformed}\n" + - "{\"index\": {\"_id\": \"2\"}}\n" + - "{\"name\": \"Good\", \"last_modified\" : \"2013-04-05\"}\n"; - - BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get(); - assertThat(bulkResponse.getItems().length, is(2)); - assertThat(bulkResponse.getItems()[0].isFailed(), is(true)); - assertThat(bulkResponse.getItems()[1].isFailed(), is(false)); - - assertExists(get("test", "type", "2")); - } - - // issue 4745 - public void testPreParsingSourceDueToRoutingShouldNotBreakCompleteBulkRequest() throws Exception { - XContentBuilder builder = jsonBuilder().startObject() - .startObject("type") - .startObject("_routing") - .field("required", true) - .field("path", "my_routing") - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate("test").addMapping("type", builder) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)); - ensureYellow("test"); - - String brokenBuildRequestData = "{\"index\": {} }\n" + - "{\"name\": \"Malformed}\n" + - "{\"index\": { \"_id\" : \"24000\" } }\n" + - "{\"name\": \"Good\", \"my_routing\" : \"48000\"}\n"; - - BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get(); - assertThat(bulkResponse.getItems().length, is(2)); - assertThat(bulkResponse.getItems()[0].isFailed(), is(true)); - assertThat(bulkResponse.getItems()[1].isFailed(), is(false)); - - assertExists(client().prepareGet("test", "type", "24000").setRouting("48000").get()); - } - - - // issue 4745 - public void testPreParsingSourceDueToIdShouldNotBreakCompleteBulkRequest() throws Exception { - XContentBuilder builder = jsonBuilder().startObject() - .startObject("type") - .startObject("_id") - .field("path", "my_id") - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate("test").addMapping("type", builder) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)); - ensureYellow("test"); - - String brokenBuildRequestData = "{\"index\": {} }\n" + - "{\"name\": \"Malformed}\n" + - "{\"index\": {} }\n" + - "{\"name\": \"Good\", \"my_id\" : \"48\"}\n"; - - BulkResponse bulkResponse = 
client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get(); - assertThat(bulkResponse.getItems().length, is(2)); - assertThat(bulkResponse.getItems()[0].isFailed(), is(true)); - assertThat(bulkResponse.getItems()[1].isFailed(), is(false)); - - assertExists(get("test", "type", "48")); - } - // issue 4987 public void testThatInvalidIndexNamesShouldNotBreakCompleteBulkRequest() { int bulkEntryCount = randomIntBetween(10, 50); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 728a932d2b5..9b2e0041462 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -78,14 +78,11 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.node.Node.HTTP_ENABLED; import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java index 74bef40b713..584a8d2c284 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.IntHashSet; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -78,7 +77,7 @@ public class EquivalenceTests extends ESIntegTestCase { protected Collection> nodePlugins() { return Collections.singleton(GroovyPlugin.class); } - + // Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported // Duel with filters public void testRandomRanges() throws Exception { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java index 37132c5a923..98a23b3e1fd 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java @@ -77,7 +77,7 @@ public class GeoShapeIntegrationTests extends 
ESIntegTestCase { // left orientation test IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName)); IndexService indexService = indicesService.indexService(idxName); - MappedFieldType fieldType = indexService.mapperService().smartNameFieldType("location"); + MappedFieldType fieldType = indexService.mapperService().fullName("location"); assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class)); GeoShapeFieldMapper.GeoShapeFieldType gsfm = (GeoShapeFieldMapper.GeoShapeFieldType)fieldType; @@ -89,7 +89,7 @@ public class GeoShapeIntegrationTests extends ESIntegTestCase { // right orientation test indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2")); indexService = indicesService.indexService(idxName+"2"); - fieldType = indexService.mapperService().smartNameFieldType("location"); + fieldType = indexService.mapperService().fullName("location"); assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class)); gsfm = (GeoShapeFieldMapper.GeoShapeFieldType)fieldType; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java index dd3d2e99fcd..b8c6f6dcc7f 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.LongHashSet; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java index 66a764dd75a..9a3a4632c6f 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java @@ -85,26 +85,44 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.action.SearchServiceTransportAction; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportService; import org.junit.After; import org.junit.Before; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import 
java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.Supplier; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.instanceOf; @ClusterScope(scope = Scope.SUITE, numClientNodes = 1, minNumDataNodes = 2) public class IndicesRequestTests extends ESIntegTestCase { @@ -127,7 +145,7 @@ public class IndicesRequestTests extends ESIntegTestCase { protected Settings nodeSettings(int ordinal) { // must set this independently of the plugin so it overrides MockTransportService return Settings.builder().put(super.nodeSettings(ordinal)) - .put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, "intercepting").build(); + .put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "intercepting").build(); } @Override @@ -756,8 +774,8 @@ public class IndicesRequestTests extends ESIntegTestCase { public String description() { return "an intercepting transport service for testing"; } - public void onModule(TransportModule transportModule) { - transportModule.addTransportService("intercepting", InterceptingTransportService.class); + public void onModule(NetworkModule module) { + module.registerTransportService("intercepting", InterceptingTransportService.class); } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index de24124669a..a8f78c62c77 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -65,7 +64,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { protected Collection> nodePlugins() { return Collections.singleton(GroovyPlugin.class); } - + private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); private static int cardinality; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java index 42141e6afb0..7a6ffa5edf0 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java @@ -38,11 +38,21 @@ import java.util.HashMap; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.*; -import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.*; +import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; +import static 
org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.nullValue; public class RandomScoreFunctionTests extends ESIntegTestCase { @@ -50,7 +60,7 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { protected Collection> nodePlugins() { return Collections.singleton(GroovyPlugin.class); } - + public void testConsistentHitsWithSameSeed() throws Exception { createIndex("test"); ensureGreen(); // make sure we are done otherwise preference could change? @@ -244,7 +254,7 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { } } } - + public void testSeeds() throws Exception { createIndex("test"); ensureGreen(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java index c54510acd4e..98d53c85174 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java @@ -58,7 +58,6 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; @ClusterScope(scope = Scope.SUITE) @@ -739,6 +738,10 @@ public class ScriptedMetricTests extends ESIntegTestCase { ScriptedMetric scriptedMetric = bucket.getAggregations().get("scripted"); assertThat(scriptedMetric, notNullValue()); assertThat(scriptedMetric.getName(), equalTo("scripted")); - assertThat(scriptedMetric.aggregation(), nullValue()); + assertThat(scriptedMetric.aggregation(), notNullValue()); + assertThat(scriptedMetric.aggregation(), instanceOf(List.class)); + List aggregationResult = (List) scriptedMetric.aggregation(); + assertThat(aggregationResult.size(), equalTo(1)); + assertThat(aggregationResult.get(0), equalTo(0)); } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 8153d207b7c..5a56e0f6999 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -392,8 +392,7 @@ public class SearchFieldsTests extends ESIntegTestCase { createIndex("test"); 
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("_source").field("enabled", false).endObject() + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") .startObject("byte_field").field("type", "byte").field("store", "yes").endObject() .startObject("short_field").field("type", "short").field("store", "yes").endObject() .startObject("integer_field").field("type", "integer").field("store", "yes").endObject() @@ -556,8 +555,7 @@ public class SearchFieldsTests extends ESIntegTestCase { createIndex("test"); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("_source").field("enabled", false).endObject() + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") .startObject("string_field").field("type", "string").endObject() .startObject("byte_field").field("type", "byte").endObject() .startObject("short_field").field("type", "short").endObject() diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index db8a13c5ab8..8d959022412 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.StringAndBytesText; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -48,24 +47,57 @@ import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.sort.*; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.GeoDistanceSortBuilder; +import org.elasticsearch.search.sort.ScriptSortBuilder; +import org.elasticsearch.search.sort.SortBuilders; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.junit.annotations.TestLogging; import org.hamcrest.Matchers; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; import java.util.Map.Entry; +import java.util.Random; +import java.util.Set; +import java.util.TreeMap; import java.util.concurrent.ExecutionException; import static org.apache.lucene.util.GeoUtils.TOLERANCE; import static 
org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.*; +import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction; import static org.elasticsearch.search.sort.SortBuilders.fieldSort; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSortValues; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; /** * @@ -1633,8 +1665,7 @@ public class SimpleSortTests extends ESIntegTestCase { assertThat(hits[i].getSortValues().length, is(1)); Object o = hits[i].getSortValues()[0]; assertThat(o, notNullValue()); - assertThat(o instanceof StringAndBytesText, is(true)); - StringAndBytesText text = (StringAndBytesText) o; + Text text = (Text) o; assertThat(text.string(), is("bar")); } @@ -1650,8 +1681,7 @@ public class SimpleSortTests extends ESIntegTestCase { assertThat(hits[i].getSortValues().length, is(1)); Object o = hits[i].getSortValues()[0]; assertThat(o, notNullValue()); - assertThat(o instanceof StringAndBytesText, is(true)); - StringAndBytesText text = (StringAndBytesText) o; + Text text = (Text) o; assertThat(text.string(), is("bar bar")); } } @@ -1925,7 +1955,7 @@ public class SimpleSortTests extends ESIntegTestCase { .addSort(fieldSort("str_field2").order(SortOrder.DESC).unmappedType("string")).get(); assertSortValues(resp, - new Object[] {new StringAndBytesText("bcd"), null}, + new Object[] {new Text("bcd"), null}, new Object[] {null, null}); resp = client().prepareSearch("test1", "test2") diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java index 5f91631c021..f5c44c6eea1 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java @@ -19,6 +19,7 @@ package 
org.elasticsearch.script.groovy; +import groovy.lang.MissingPropertyException; import org.apache.lucene.util.Constants; import org.codehaus.groovy.control.MultipleCompilationErrorsException; import org.elasticsearch.common.settings.Settings; @@ -27,8 +28,6 @@ import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; -import groovy.lang.MissingPropertyException; - import java.nio.file.Path; import java.security.PrivilegedActionException; import java.util.AbstractMap; @@ -84,8 +83,6 @@ public class GroovySecurityTests extends ESTestCase { assertSuccess("def range = 1..doc['foo'].value; def v = range.get(0)"); // Maps assertSuccess("def v = doc['foo'].value; def m = [:]; m.put(\"value\", v)"); - // serialization to json (this is best effort considering the unsafe etc at play) - assertSuccess("def x = 5; groovy.json.JsonOutput.toJson(x)"); // Times assertSuccess("def t = Instant.now().getMillis()"); // GroovyCollections @@ -99,7 +96,7 @@ // filtered directly by our classloader assertFailure("getClass().getClassLoader().loadClass(\"java.lang.Runtime\").availableProcessors()", PrivilegedActionException.class); // unfortunately, we have access to other classloaders (due to indy mechanism needing getClassLoader permission) - // but we can't do much with them directly at least. + // but we can't do much with them directly at least. assertFailure("myobject.getClass().getClassLoader().loadClass(\"java.lang.Runtime\").availableProcessors()", SecurityException.class); assertFailure("d = new DateTime(); d.getClass().getDeclaredMethod(\"year\").setAccessible(true)", SecurityException.class); assertFailure("d = new DateTime(); d.\"${'get' + 'Class'}\"()." + @@ -133,9 +130,9 @@ vars.put("myarray", Arrays.asList("foo")); vars.put("myobject", new MyObject()); - se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script)), vars).run(); + se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script, Collections.emptyMap())), vars).run(); } - + public static class MyObject { public int getPrimitive() { return 0; } public Object getObject() { return "value"; } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java index 93172056071..f43856cb7b5 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java @@ -19,7 +19,6 @@ package org.elasticsearch.script.mustache; import com.github.mustachejava.Mustache; - import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; @@ -85,7 +84,7 @@ public class MustacheScriptEngineService extends AbstractComponent implements Sc * @return a compiled template object for later execution. * */ @Override - public Object compile(String template) { + public Object compile(String template, Map<String, Object> params) { /** Factory to generate Mustache objects from.
*/ return (new JsonEscapingMustacheFactory()).compile(new FastStringReader(template), "query-template"); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java index 87cc51c2ec2..4b3d3f3ff98 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests; import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -32,7 +31,6 @@ import org.elasticsearch.script.Template; import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.rest.support.FileUtils; import java.nio.file.Files; import java.nio.file.Path; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index a0699a35534..92bf7d03484 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -82,12 +82,12 @@ import static org.hamcrest.Matchers.nullValue; * request, modify again, request again, etc. This makes it very obvious what changes between requests. 
*/ public class SuggestSearchTests extends ESIntegTestCase { - + @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(MustachePlugin.class); } - + // see #3196 public void testSuggestAcrossMultipleIndices() throws IOException { createIndex("test"); @@ -193,11 +193,8 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("name") - .field("type", "multi_field") + .field("type", "string") .startObject("fields") - .startObject("name") - .field("type", "string") - .endObject() .startObject("shingled") .field("type", "string") .field("analyzer", "biword") @@ -267,11 +264,8 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("name") - .field("type", "multi_field") + .field("type", "string") .startObject("fields") - .startObject("name") - .field("type", "string") - .endObject() .startObject("shingled") .field("type", "string") .field("analyzer", "biword") @@ -618,7 +612,7 @@ public class SuggestSearchTests extends ESIntegTestCase { // Check the name this time because we're repeating it which is funky assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel")); } - + private List<String> readMarvelHeroNames() throws IOException, URISyntaxException { return Files.readAllLines(PathUtils.get(Suggest.class.getResource("/config/names.txt").toURI()), StandardCharsets.UTF_8); } @@ -808,13 +802,8 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties") .startObject("name") - .field("type", "multi_field") - .startObject("fields") - .startObject("name") - .field("type", "string") - .field("analyzer", "suggest") - .endObject() - .endObject() + .field("type", "string") + .field("analyzer", "suggest") .endObject() .endObject() .endObject().endObject(); @@ -855,13 +844,8 @@ public class SuggestSearchTests extends ESIntegTestCase { startObject("type1"). startObject("properties"). startObject("name"). - field("type", "multi_field"). - startObject("fields"). - startObject("name"). - field("type", "string"). - field("analyzer", "suggest"). - endObject(). - endObject(). + field("type", "string"). + field("analyzer", "suggest"). endObject(). endObject(). endObject().
@@ -1166,11 +1150,12 @@ public class SuggestSearchTests extends ESIntegTestCase { String filterString = XContentFactory.jsonBuilder() .startObject() .startObject("match_phrase") - .field("title", "{{suggestion}}") + .field("{{field}}", "{{suggestion}}") .endObject() .endObject() .string(); PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString); + filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title")); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredQuerySuggest); assertSuggestionSize(searchSuggest, 0, 2, "title"); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index ce29bf246be..8e8c8981493 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -28,6 +28,7 @@ import org.junit.Before; import java.io.IOException; import java.io.StringWriter; import java.nio.charset.Charset; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -52,7 +53,7 @@ public class MustacheScriptEngineTests extends ESTestCase { + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + "}}, \"negative_boost\": {{boost_val}} } }}"; Map<String, Object> vars = new HashMap<>(); vars.put("boost_val", "0.3"); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template)), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}", new String(o.toBytes(), Charset.forName("UTF-8"))); @@ -63,7 +64,7 @@ Map<String, Object> vars = new HashMap<>(); vars.put("boost_val", "0.3"); vars.put("body_val", "\"quick brown\""); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template)), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}", new String(o.toBytes(), Charset.forName("UTF-8"))); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index 76c867802a9..d8cf7732378 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.script.mustache; import com.github.mustachejava.DefaultMustacheFactory; import com.github.mustachejava.Mustache; import com.github.mustachejava.MustacheFactory; -
import org.elasticsearch.test.ESTestCase; import java.io.StringReader; diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 4942bbc6af3..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4e56ba76d6b23756b2bd4d9e42b2b00122cd4fa5 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..84b4b753063 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +1fce4e9b5c4482bb95e8b275c825d112640d6f1e \ No newline at end of file diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/ICUCollationKeyFilter.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/ICUCollationKeyFilter.java index 674ae8b8f12..43165555430 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/ICUCollationKeyFilter.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/ICUCollationKeyFilter.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Collator; import com.ibm.icu.text.RawCollationKey; - import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -36,7 +35,7 @@ import java.io.IOException; *
 * <p>
 *   WARNING: Make sure you use exactly the same Collator at
 *   index and query time -- CollationKeys are only comparable when produced by
- *   the same Collator.  {@link com.ibm.icu.text.RuleBasedCollator}s are 
+ *   the same Collator.  {@link com.ibm.icu.text.RuleBasedCollator}s are
 *   independently versioned, so it is safe to search against stored
 *   CollationKeys if the following are exactly the same (best practice is
 *   to store this information with the index and check that they remain the
@@ -49,11 +48,11 @@ import java.io.IOException;
 *   <li>
 *     The collation strength used - see {@link Collator#setStrength(int)}
 *   </li>
- * </ol> 
+ * </ol>
 * <p>
 *   CollationKeys generated by ICU Collators are not compatible with those
- *   generated by java.text.Collators.  Specifically, if you use 
- *   ICUCollationKeyFilter to generate index terms, do not use 
+ *   generated by java.text.Collators.  Specifically, if you use
+ *   ICUCollationKeyFilter to generate index terms, do not use
 *   {@code CollationKeyFilter} on the query side, or vice versa.
 * </p>
 * <p>
@@ -74,7 +73,7 @@ public final class ICUCollationKeyFilter extends TokenFilter {
   private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);

   /**
-   * 
+   *
    * @param input Source token stream
    * @param collator CollationKey generator
    */
diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java
index 51243856a1f..b31502cdd7d 100644
--- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java
+++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java
@@ -19,19 +19,18 @@
 package org.elasticsearch.index.analysis;

-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.icu.ICUFoldingFilter;
-import org.elasticsearch.common.settings.Settings;
-
 import com.ibm.icu.text.FilteredNormalizer2;
 import com.ibm.icu.text.Normalizer2;
 import com.ibm.icu.text.UnicodeSet;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.icu.ICUFoldingFilter;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;

 /**
- * Uses the {@link org.apache.lucene.analysis.icu.ICUFoldingFilter}. 
+ * Uses the {@link org.apache.lucene.analysis.icu.ICUFoldingFilter}.
 * Applies foldings from UTR#30 Character Foldings.
 *

      * Can be filtered to handle certain characters in a specified way (see http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html) @@ -54,7 +53,7 @@ public class IcuFoldingTokenFilterFactory extends AbstractTokenFilterFactory { // The ICUFoldingFilter is in fact implemented as a ICUNormalizer2Filter. // ICUFoldingFilter lacks a constructor for adding filtering so we implemement it here - if (unicodeSetFilter != null) { + if (unicodeSetFilter != null) { Normalizer2 base = Normalizer2.getInstance( ICUFoldingFilter.class.getResourceAsStream("utr30.nrm"), "utr30", Normalizer2.Mode.COMPOSE); @@ -62,7 +61,7 @@ public class IcuFoldingTokenFilterFactory extends AbstractTokenFilterFactory { unicodeSet.freeze(); Normalizer2 filtered = new FilteredNormalizer2(base, unicodeSet); - return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, filtered); + return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, filtered); } else { return new ICUFoldingFilter(tokenStream); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java index 6f830b29d15..4833e887153 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Normalizer2; import org.apache.lucene.analysis.TokenStream; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java index 1d5136f60e1..6ecdf3888e9 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java @@ -23,7 +23,6 @@ import com.ibm.icu.text.Transliterator; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.icu.ICUTransformFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IndexableBinaryStringTools.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IndexableBinaryStringTools.java index 82be3c66159..38b5da309be 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IndexableBinaryStringTools.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IndexableBinaryStringTools.java @@ -17,7 +17,7 @@ package org.elasticsearch.index.analysis; * limitations under the License. 
*/ -import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; // javadoc +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; /** * Provides support for converting byte sequences to Strings and back again. @@ -26,7 +26,7 @@ import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; // ja * The Strings are constructed using a Base 8000h encoding of the original * binary data - each char of an encoded String represents a 15-bit chunk * from the byte sequence. Base 8000h was chosen because it allows for all - * lower 15 bits of char to be used without restriction; the surrogate range + * lower 15 bits of char to be used without restriction; the surrogate range * [U+D8000-U+DFFF] does not represent valid chars, and would require * complicated handling to avoid them and allow use of char's high bit. *

      @@ -61,7 +61,7 @@ public final class IndexableBinaryStringTools { /** * Returns the number of chars required to encode the given bytes. - * + * * @param inputArray byte sequence to be encoded * @param inputOffset initial offset into inputArray * @param inputLength number of bytes in inputArray @@ -75,7 +75,7 @@ public final class IndexableBinaryStringTools { /** * Returns the number of bytes required to decode the given char sequence. - * + * * @param encoded char sequence to be decoded * @param offset initial offset * @param length number of characters @@ -97,7 +97,7 @@ public final class IndexableBinaryStringTools { * Encodes the input byte sequence into the output char sequence. Before * calling this method, ensure that the output array has sufficient * capacity by calling {@link #getEncodedLength(byte[], int, int)}. - * + * * @param inputArray byte sequence to be encoded * @param inputOffset initial offset into inputArray * @param inputLength number of bytes in inputArray @@ -151,7 +151,7 @@ public final class IndexableBinaryStringTools { * Decodes the input char sequence into the output byte sequence. Before * calling this method, ensure that the output array has sufficient capacity * by calling {@link #getDecodedLength(char[], int, int)}. - * + * * @param inputArray char sequence to be decoded * @param inputOffset initial offset into inputArray * @param inputLength number of chars in inputArray @@ -233,7 +233,7 @@ public final class IndexableBinaryStringTools { this.finalShift = finalShift; this.finalMask = (short)((short)0xFF >>> finalShift); if (finalShift != 0) { - advanceBytes = 1; + advanceBytes = 1; } } } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java index 47c2f8f051a..46b8d530f5f 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java @@ -19,7 +19,12 @@ package org.elasticsearch.plugin.analysis.icu; -import org.elasticsearch.index.analysis.*; +import org.elasticsearch.index.analysis.IcuCollationTokenFilterFactory; +import org.elasticsearch.index.analysis.IcuFoldingTokenFilterFactory; +import org.elasticsearch.index.analysis.IcuNormalizerCharFilterFactory; +import org.elasticsearch.index.analysis.IcuNormalizerTokenFilterFactory; +import org.elasticsearch.index.analysis.IcuTokenizerFactory; +import org.elasticsearch.index.analysis.IcuTransformTokenFilterFactory; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.Plugin; diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java index 9e59b8e42c3..33c1f337dbd 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Collator; import com.ibm.icu.text.RuleBasedCollator; import com.ibm.icu.util.ULocale; - import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; 
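The IndexableBinaryStringTools hunks above document a two-step, caller-allocates contract: first compute the required output size with getEncodedLength or getDecodedLength, then let encode or decode fill a pre-sized array. A minimal round-trip sketch of that contract follows; it is an illustration only, not part of this patch. BinaryCodecRoundTrip is a made-up harness, and the full signatures (including the output-array parameters) are assumed from the javadoc shown in the hunks.

    import java.util.Arrays;
    import org.elasticsearch.index.analysis.IndexableBinaryStringTools;

    public class BinaryCodecRoundTrip {
        public static void main(String[] args) {
            byte[] original = new byte[] { 0x01, 0x7f, (byte) 0xff };

            // Size the char output first, as the javadoc requires, then encode.
            // Each output char carries a 15-bit chunk of the input (base 8000h).
            int encodedLength = IndexableBinaryStringTools.getEncodedLength(original, 0, original.length);
            char[] encoded = new char[encodedLength];
            IndexableBinaryStringTools.encode(original, 0, original.length, encoded, 0, encodedLength);

            // Same pattern in reverse: size the byte output, then decode.
            int decodedLength = IndexableBinaryStringTools.getDecodedLength(encoded, 0, encoded.length);
            byte[] decoded = new byte[decodedLength];
            IndexableBinaryStringTools.decode(encoded, 0, encoded.length, decoded, 0, decodedLength);

            System.out.println(Arrays.equals(original, decoded)); // expected: true
        }
    }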
diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java index bd2f959bf9c..acdbd9d4dfc 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Normalizer2; - import org.apache.lucene.analysis.CharFilter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 1ba2a93066d..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d6ccac802dc1e4c177be043a173377cf5e517cff \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..429f8b59b3e --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +f104f306fef9d3033db026705043e9cbd145aba5 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java index f363cf0c15c..e191d78198f 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseBaseFormFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java index 7d25ca03fdb..ebebdcb6bba 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseKatakanaStemFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git 
a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java index 1d4ecc2c33d..59d1088fd1b 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseReadingFormFilter; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java index fde7d3d5964..6c0a15f2e39 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java @@ -19,7 +19,6 @@ package org.elasticsearch.plugin.analysis.kuromoji; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.index.analysis.JapaneseStopTokenFilterFactory; import org.elasticsearch.index.analysis.KuromojiAnalyzerProvider; import org.elasticsearch.index.analysis.KuromojiBaseFormFilterFactory; @@ -28,6 +27,7 @@ import org.elasticsearch.index.analysis.KuromojiKatakanaStemmerFactory; import org.elasticsearch.index.analysis.KuromojiPartOfSpeechFilterFactory; import org.elasticsearch.index.analysis.KuromojiReadingFormFilterFactory; import org.elasticsearch.index.analysis.KuromojiTokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.Plugin; /** diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index 0942be5399a..3adb8202833 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -46,7 +46,10 @@ import java.io.StringReader; import java.nio.file.Files; import java.nio.file.Path; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; /** */ diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 2b611862d41..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -70ad9f6c3738727229867419d949527cc7789f62 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 
b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..a814cf5cb03 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +40b2034a6aed4c3fe0509016fab4f7bbb37a5fc8 \ No newline at end of file diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java index 37f7e0cd214..9374410765d 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java @@ -20,7 +20,13 @@ package org.elasticsearch.index.analysis; import org.apache.commons.codec.Encoder; -import org.apache.commons.codec.language.*; +import org.apache.commons.codec.language.Caverphone1; +import org.apache.commons.codec.language.Caverphone2; +import org.apache.commons.codec.language.ColognePhonetic; +import org.apache.commons.codec.language.DaitchMokotoffSoundex; +import org.apache.commons.codec.language.Metaphone; +import org.apache.commons.codec.language.RefinedSoundex; +import org.apache.commons.codec.language.Soundex; import org.apache.commons.codec.language.bm.Languages.LanguageSet; import org.apache.commons.codec.language.bm.NameType; import org.apache.commons.codec.language.bm.PhoneticEngine; @@ -61,7 +67,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory { this.maxcodelength = 0; this.replace = settings.getAsBoolean("replace", true); // weird, encoder is null at last step in SimplePhoneticAnalysisTests, so we set it to metaphone as default - String encodername = settings.get("encoder", "metaphone"); + String encodername = settings.get("encoder", "metaphone"); if ("metaphone".equalsIgnoreCase(encodername)) { this.encoder = new Metaphone(); } else if ("soundex".equalsIgnoreCase(encodername)) { diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/KoelnerPhonetik.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/KoelnerPhonetik.java index e70722c9b48..57195062cdd 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/KoelnerPhonetik.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/KoelnerPhonetik.java @@ -22,7 +22,12 @@ package org.elasticsearch.index.analysis.phonetic; import org.apache.commons.codec.EncoderException; import org.apache.commons.codec.StringEncoder; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -36,7 +41,7 @@ import java.util.regex.Pattern; * Java-Programmierung von Jörg Reiher * * mit Änderungen von Jörg Prante - * + * */ public class KoelnerPhonetik implements StringEncoder { @@ -59,7 +64,7 @@ public class KoelnerPhonetik implements StringEncoder { public KoelnerPhonetik() { init(); } - + public KoelnerPhonetik(boolean useOnlyPrimaryCode) { this(); this.primary = useOnlyPrimaryCode; @@ -67,7 +72,7 @@ public class KoelnerPhonetik implements StringEncoder { /** * Get variation patterns - * + * * @return string array of variations */ protected String[] 
getPatterns() { @@ -99,7 +104,7 @@ public class KoelnerPhonetik implements StringEncoder { public Object encode(Object str) throws EncoderException { return encode((String) str); } - + @Override public String encode(String str) throws EncoderException { if (str == null) return null; @@ -114,14 +119,14 @@ public class KoelnerPhonetik implements StringEncoder { return sb.toString(); } - + private void init() { this.variationsPatterns = new Pattern[getPatterns().length]; for (int i = 0; i < getPatterns().length; i++) { this.variationsPatterns[i] = Pattern.compile(getPatterns()[i]); } } - + private String[] code(String str) { List<String> parts = partition(str); String[] codes = new String[parts.size()]; diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index e28887afd56..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -75504fd906929700e7d11f9600e4a79de48e1090 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..af3c4a277ea --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +e117a87f4338be80b0a052d2ce454d5086aa57f1 \ No newline at end of file diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java index 1daaa4b0a3d..22fcf238725 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseNoOpTokenFilterFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseNoOpTokenFilterFactory.java index e0f9f556896..3f08f2e458c 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseNoOpTokenFilterFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseNoOpTokenFilterFactory.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java
b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java index 7dade32f0e8..9d387296152 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.cn.smart.HMMChineseTokenizer; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1719088.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1719088.jar.sha1 deleted file mode 100644 index 739ecc4eb8f..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1719088.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9eeeeabeab89ec305e831d80bdcc7e85a1140fbb \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 new file mode 100644 index 00000000000..899769b0e29 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 @@ -0,0 +1 @@ +703dd91fccdc1c4662c80e412a449097c0578d83 \ No newline at end of file diff --git a/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStemTokenFilterFactory.java b/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStemTokenFilterFactory.java index 0ee789f66e9..afc7d527a6c 100644 --- a/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStemTokenFilterFactory.java +++ b/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishStemTokenFilterFactory.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.analysis.pl; -import org.apache.lucene.analysis.pl.PolishAnalyzer; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.pl.PolishAnalyzer; import org.apache.lucene.analysis.stempel.StempelFilter; import org.apache.lucene.analysis.stempel.StempelStemmer; import org.egothor.stemmer.Trie; diff --git a/plugins/build.gradle b/plugins/build.gradle index bdcc604a296..e49b08c6015 100644 --- a/plugins/build.gradle +++ b/plugins/build.gradle @@ -17,7 +17,8 @@ * under the License. 
*/ -subprojects { +// only configure immediate children of plugins dir +configure(subprojects.findAll { it.parent.path == project.path }) { group = 'org.elasticsearch.plugin' apply plugin: 'elasticsearch.esplugin' diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java index e3faeb1badc..fa83fb4fd3d 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java @@ -239,6 +239,6 @@ public class DeleteByQueryRequest extends ActionRequest<DeleteByQueryRequest> im ", size:" + size + ", timeout:" + timeout + ", routing:" + routing + - ", query:" + query.toString(); + ", query:" + query; } } diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java index df57aca1668..9fd42ae513a 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java @@ -27,7 +27,14 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.search.*; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; @@ -42,7 +49,10 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java index b189745408f..8395223f669 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java @@ -22,14 +22,10 @@ package org.elasticsearch.plugin.deletebyquery; import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.deletebyquery.DeleteByQueryAction; import org.elasticsearch.action.deletebyquery.TransportDeleteByQueryAction; -import org.elasticsearch.common.inject.Module; +import
org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestModule; import org.elasticsearch.rest.action.deletebyquery.RestDeleteByQueryAction; -import java.util.Collection; -import java.util.Collections; - public class DeleteByQueryPlugin extends Plugin { public static final String NAME = "delete-by-query"; @@ -48,8 +44,8 @@ public class DeleteByQueryPlugin extends Plugin { actionModule.registerAction(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class); } - public void onModule(RestModule restModule) { - restModule.addRestAction(RestDeleteByQueryAction.class); + public void onModule(NetworkModule module) { + module.registerRestHandler(RestDeleteByQueryAction.class); } } diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java index c44608c4e4b..57bfa4c2328 100644 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java +++ b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.text.StringText; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.CountDown; @@ -339,7 +339,7 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase { final int nbDocs = randomIntBetween(0, 20); SearchHit[] docs = new SearchHit[nbDocs]; for (int i = 0; i < nbDocs; i++) { - InternalSearchHit doc = new InternalSearchHit(randomInt(), String.valueOf(i), new StringText("type"), null); + InternalSearchHit doc = new InternalSearchHit(randomInt(), String.valueOf(i), new Text("type"), null); doc.shard(new SearchShardTarget("node", "test", randomInt())); docs[i] = doc; } diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index 5042824eb07..d85d08794ea 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -37,15 +37,12 @@ dependencies { compile "commons-lang:commons-lang:2.6" compile "commons-io:commons-io:2.4" compile 'javax.mail:mail:1.4.5' - compile 'javax.activation:activation:1.1' compile 'javax.inject:javax.inject:1' compile "com.sun.jersey:jersey-client:${versions.jersey}" compile "com.sun.jersey:jersey-core:${versions.jersey}" compile "com.sun.jersey:jersey-json:${versions.jersey}" compile 'org.codehaus.jettison:jettison:1.1' compile 'com.sun.xml.bind:jaxb-impl:2.2.3-1' - compile 'javax.xml.bind:jaxb-api:2.2.2' - compile 'javax.xml.stream:stax-api:1.0-2' compile 'org.codehaus.jackson:jackson-core-asl:1.9.2' compile 'org.codehaus.jackson:jackson-mapper-asl:1.9.2' compile 'org.codehaus.jackson:jackson-jaxrs:1.9.2' @@ -57,7 +54,6 @@ dependencyLicenses { mapping from: /jackson-.*/, to: 'jackson' mapping from: /jersey-.*/, to: 'jersey' mapping from: /jaxb-.*/, to: 'jaxb' - mapping from: /stax-.*/, to: 'stax' } compileJava.options.compilerArgs << '-Xlint:-path,-serial,-static,-unchecked' @@ -66,3 +62,38 @@ 
compileJava.options.compilerArgs << '-Xlint:-deprecation' // TODO: and why does this static not show up in maven... compileTestJava.options.compilerArgs << '-Xlint:-static' +thirdPartyAudit.excludes = [ + // classes are missing + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + 'org.eclipse.persistence.descriptors.ClassDescriptor', + 'org.eclipse.persistence.internal.oxm.MappingNodeValue', + 'org.eclipse.persistence.internal.oxm.TreeObjectBuilder', + 'org.eclipse.persistence.internal.oxm.XPathFragment', + 'org.eclipse.persistence.internal.oxm.XPathNode', + 'org.eclipse.persistence.internal.queries.ContainerPolicy', + 'org.eclipse.persistence.jaxb.JAXBContext', + 'org.eclipse.persistence.jaxb.JAXBHelper', + 'org.eclipse.persistence.mappings.DatabaseMapping', + 'org.eclipse.persistence.mappings.converters.TypeConversionConverter', + 'org.eclipse.persistence.mappings.foundation.AbstractCompositeDirectCollectionMapping', + 'org.eclipse.persistence.oxm.XMLContext', + 'org.eclipse.persistence.oxm.XMLDescriptor', + 'org.eclipse.persistence.oxm.XMLField', + 'org.eclipse.persistence.oxm.mappings.XMLCompositeCollectionMapping', + 'org.eclipse.persistence.sessions.DatabaseSession', + 'org.jvnet.fastinfoset.VocabularyApplicationData', + 'org.jvnet.staxex.Base64Data', + 'org.jvnet.staxex.XMLStreamReaderEx', + 'org.jvnet.staxex.XMLStreamWriterEx', + 'org.osgi.framework.Bundle', + 'org.osgi.framework.BundleActivator', + 'org.osgi.framework.BundleContext', + 'org.osgi.framework.BundleEvent', + 'org.osgi.framework.SynchronousBundleListener', + 'com.sun.xml.fastinfoset.stax.StAXDocumentParser', + 'com.sun.xml.fastinfoset.stax.StAXDocumentSerializer', +] diff --git a/plugins/discovery-azure/licenses/activation-1.1.jar.sha1 b/plugins/discovery-azure/licenses/activation-1.1.jar.sha1 deleted file mode 100644 index c4ee8fa5eb8..00000000000 --- a/plugins/discovery-azure/licenses/activation-1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e6cb541461c2834bdea3eb920f1884d1eb508b50 diff --git a/plugins/discovery-azure/licenses/activation-LICENSE.txt b/plugins/discovery-azure/licenses/activation-LICENSE.txt deleted file mode 100644 index 1154e0aeec5..00000000000 --- a/plugins/discovery-azure/licenses/activation-LICENSE.txt +++ /dev/null @@ -1,119 +0,0 @@ -COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 - -1. Definitions. - -1.1. Contributor means each individual or entity that creates or contributes to the creation of Modifications. - -1.2. Contributor Version means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor. - -1.3. Covered Software means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof. - -1.4. Executable means the Covered Software in any form other than Source Code. - -1.5. Initial Developer means the individual or entity that first makes Original Software available under this License. - -1.6. Larger Work means a work which combines Covered Software or portions thereof with code not governed by the terms of this License. - -1.7. License means this document. - -1.8. 
Licensable means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein. - -1.9. Modifications means the Source Code and Executable form of any of the following: - -A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; - -B. Any new file that contains any part of the Original Software or previous Modification; or - -C. Any new file that is contributed or otherwise made available under the terms of this License. - -1.10. Original Software means the Source Code and Executable form of computer software code that is originally released under this License. - -1.11. Patent Claims means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor. - -1.12. Source Code means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code. - -1.13. You (or Your) means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, You includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, control means (a)the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b)ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. - -2. License Grants. - -2.1. The Initial Developer Grant. -Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license: -(a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and -(b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof). -(c) The licenses granted in Sections2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License. -(d) Notwithstanding Section2.1(b) above, no patent license is granted: (1)for code that You delete from the Original Software, or (2)for infringements caused by: (i)the modification of the Original Software, or (ii)the combination of the Original Software with other software or devices. - -2.2. Contributor Grant. 
-Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: -(a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and -(b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1)Modifications made by that Contributor (or portions thereof); and (2)the combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination). -(c) The licenses granted in Sections2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party. -(d) Notwithstanding Section2.2(b) above, no patent license is granted: (1)for any code that Contributor has deleted from the Contributor Version; (2)for infringements caused by: (i)third party modifications of Contributor Version, or (ii)the combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3)under Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor. - -3. Distribution Obligations. - -3.1. Availability of Source Code. - -Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange. - -3.2. Modifications. - -The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License. - -3.3. Required Notices. -You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer. - -3.4. Application of Additional Terms. -You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. 
You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer. - -3.5. Distribution of Executable Versions. -You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipients' rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer. - -3.6. Larger Works. -You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software. - -4. Versions of the License. - -4.1. New Versions. -Sun Microsystems, Inc. is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License. - -4.2. Effect of New Versions. - -You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward. -4.3. Modified Versions. - -When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a) rename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b) otherwise make it clear that the license contains terms which differ from this License. - -5. DISCLAIMER OF WARRANTY. - -COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU.
SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. - -6. TERMINATION. - -6.1. This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive. - -6.2. If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as Participant) alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant. - -6.3. In the event of termination under Sections 6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination. - -7. LIMITATION OF LIABILITY. - -UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. - -8. U.S. GOVERNMENT END USERS. - -The Covered Software is a commercial item, as that term is defined in 48 C.F.R. 2.101 (Oct. 1995), consisting of commercial computer software (as that term is defined at 48 C.F.R. 252.227-7014(a)(1)) and commercial computer software documentation as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S.
Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License. - -9. MISCELLANEOUS. - -This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdiction's conflict-of-law provisions. Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys' fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software. - -10. RESPONSIBILITY FOR CLAIMS. - -As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability. - -NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) -The GlassFish code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California.
- - - diff --git a/plugins/discovery-azure/licenses/activation-NOTICE.txt b/plugins/discovery-azure/licenses/activation-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/plugins/discovery-azure/licenses/activation-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 deleted file mode 100644 index a145d47cec9..00000000000 --- a/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -aeb3021ca93dde265796d82015beecdcff95bf09 diff --git a/plugins/discovery-azure/licenses/stax-NOTICE.txt b/plugins/discovery-azure/licenses/stax-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/plugins/discovery-azure/licenses/stax-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugins/discovery-azure/licenses/stax-api-1.0-2.jar.sha1 b/plugins/discovery-azure/licenses/stax-api-1.0-2.jar.sha1 deleted file mode 100644 index fb00ad889b6..00000000000 --- a/plugins/discovery-azure/licenses/stax-api-1.0-2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d6337b0de8b25e53e81b922352fbea9f9f57ba0b diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java index 26406e3811c..39221ee6904 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java @@ -36,7 +36,10 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; -import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.*; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_PASSWORD; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_PATH; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_TYPE; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.SUBSCRIPTION_ID; /** * diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeSettingsFilter.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeSettingsFilter.java index c4a1837fdaf..a2851e70969 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeSettingsFilter.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeSettingsFilter.java @@ -24,7 +24,10 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.*; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_PASSWORD; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_PATH; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.KEYSTORE_TYPE; +import static org.elasticsearch.cloud.azure.management.AzureComputeService.Management.SUBSCRIPTION_ID; public class AzureComputeSettingsFilter extends AbstractComponent { diff --git 
a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java index 36b20b09fc1..89d6d17298f 100755 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureDiscovery.java @@ -22,12 +22,12 @@ package org.elasticsearch.discovery.azure; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -40,9 +40,9 @@ public class AzureDiscovery extends ZenDiscovery { @Inject public AzureDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, NodeSettingsService nodeSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, nodeSettingsService, + super(settings, clusterName, threadPool, transportService, clusterService, clusterSettings, pingService, electMasterService, discoverySettings); } } diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java index 9f58b0bbb18..690ab623bd9 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java @@ -19,8 +19,11 @@ package org.elasticsearch.discovery.azure; -import com.microsoft.windowsazure.management.compute.models.*; - +import com.microsoft.windowsazure.management.compute.models.DeploymentSlot; +import com.microsoft.windowsazure.management.compute.models.DeploymentStatus; +import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; +import com.microsoft.windowsazure.management.compute.models.InstanceEndpoint; +import com.microsoft.windowsazure.management.compute.models.RoleInstance; import org.elasticsearch.Version; import org.elasticsearch.cloud.azure.AzureServiceDisableException; import org.elasticsearch.cloud.azure.AzureServiceRemoteException; @@ -41,8 +44,8 @@ import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.util.ArrayList; -import java.util.Locale; import java.util.List; +import java.util.Locale; /** * diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 77cfd6626d5..403b2638257 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -48,3 +48,17 @@ test { // this is needed for insecure plugins, remove if possible! 
systemProperty 'tests.artifact', project.name } + +thirdPartyAudit.excludes = [ + // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl + // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault + // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext + 'com.amazonaws.util.XpathUtils', + + // classes are missing + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', +] diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index b6306e6209c..349a513455c 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -23,7 +23,12 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; -import com.amazonaws.auth.*; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSCredentialsProviderChain; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.EnvironmentVariableCredentialsProvider; +import com.amazonaws.auth.InstanceProfileCredentialsProvider; +import com.amazonaws.auth.SystemPropertiesCredentialsProvider; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.retry.RetryPolicy; import com.amazonaws.services.ec2.AmazonEC2; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index f7e70281a3d..cafbae2671f 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -21,7 +21,12 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.AmazonClientException; import com.amazonaws.services.ec2.AmazonEC2; -import com.amazonaws.services.ec2.model.*; +import com.amazonaws.services.ec2.model.DescribeInstancesRequest; +import com.amazonaws.services.ec2.model.DescribeInstancesResult; +import com.amazonaws.services.ec2.model.Filter; +import com.amazonaws.services.ec2.model.GroupIdentifier; +import com.amazonaws.services.ec2.model.Instance; +import com.amazonaws.services.ec2.model.Reservation; import org.elasticsearch.Version; import org.elasticsearch.cloud.aws.AwsEc2Service; import org.elasticsearch.cloud.aws.AwsEc2Service.DISCOVERY_EC2; @@ -36,7 +41,14 @@ import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.discovery.zen.ping.unicast.UnicastHostsProvider; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; /** * diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java index e94b7618d12..aa3cef01d03 100755 --- 
a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java @@ -22,12 +22,12 @@ package org.elasticsearch.discovery.ec2; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -40,10 +40,10 @@ public class Ec2Discovery extends ZenDiscovery { @Inject public Ec2Discovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, NodeSettingsService nodeSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, nodeSettingsService, + super(settings, clusterName, threadPool, transportService, clusterService, clusterSettings, pingService, electMasterService, discoverySettings); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java index d69d939e5b4..baaeb9b1b01 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.ClientConfiguration; - import org.elasticsearch.test.ESTestCase; import static org.hamcrest.CoreMatchers.is; diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index 47e2554dcd4..88d87a2d8c2 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -25,11 +25,329 @@ import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ResponseMetadata; import com.amazonaws.regions.Region; import com.amazonaws.services.ec2.AmazonEC2; -import com.amazonaws.services.ec2.model.*; +import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionRequest; +import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionResult; +import com.amazonaws.services.ec2.model.AllocateAddressRequest; +import com.amazonaws.services.ec2.model.AllocateAddressResult; +import com.amazonaws.services.ec2.model.AssignPrivateIpAddressesRequest; +import com.amazonaws.services.ec2.model.AssociateAddressRequest; +import com.amazonaws.services.ec2.model.AssociateAddressResult; +import com.amazonaws.services.ec2.model.AssociateDhcpOptionsRequest; +import com.amazonaws.services.ec2.model.AssociateRouteTableRequest; +import com.amazonaws.services.ec2.model.AssociateRouteTableResult; +import 
com.amazonaws.services.ec2.model.AttachClassicLinkVpcRequest; +import com.amazonaws.services.ec2.model.AttachClassicLinkVpcResult; +import com.amazonaws.services.ec2.model.AttachInternetGatewayRequest; +import com.amazonaws.services.ec2.model.AttachNetworkInterfaceRequest; +import com.amazonaws.services.ec2.model.AttachNetworkInterfaceResult; +import com.amazonaws.services.ec2.model.AttachVolumeRequest; +import com.amazonaws.services.ec2.model.AttachVolumeResult; +import com.amazonaws.services.ec2.model.AttachVpnGatewayRequest; +import com.amazonaws.services.ec2.model.AttachVpnGatewayResult; +import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupEgressRequest; +import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupIngressRequest; +import com.amazonaws.services.ec2.model.BundleInstanceRequest; +import com.amazonaws.services.ec2.model.BundleInstanceResult; +import com.amazonaws.services.ec2.model.CancelBundleTaskRequest; +import com.amazonaws.services.ec2.model.CancelBundleTaskResult; +import com.amazonaws.services.ec2.model.CancelConversionTaskRequest; +import com.amazonaws.services.ec2.model.CancelExportTaskRequest; +import com.amazonaws.services.ec2.model.CancelImportTaskRequest; +import com.amazonaws.services.ec2.model.CancelImportTaskResult; +import com.amazonaws.services.ec2.model.CancelReservedInstancesListingRequest; +import com.amazonaws.services.ec2.model.CancelReservedInstancesListingResult; +import com.amazonaws.services.ec2.model.CancelSpotFleetRequestsRequest; +import com.amazonaws.services.ec2.model.CancelSpotFleetRequestsResult; +import com.amazonaws.services.ec2.model.CancelSpotInstanceRequestsRequest; +import com.amazonaws.services.ec2.model.CancelSpotInstanceRequestsResult; +import com.amazonaws.services.ec2.model.ConfirmProductInstanceRequest; +import com.amazonaws.services.ec2.model.ConfirmProductInstanceResult; +import com.amazonaws.services.ec2.model.CopyImageRequest; +import com.amazonaws.services.ec2.model.CopyImageResult; +import com.amazonaws.services.ec2.model.CopySnapshotRequest; +import com.amazonaws.services.ec2.model.CopySnapshotResult; +import com.amazonaws.services.ec2.model.CreateCustomerGatewayRequest; +import com.amazonaws.services.ec2.model.CreateCustomerGatewayResult; +import com.amazonaws.services.ec2.model.CreateDhcpOptionsRequest; +import com.amazonaws.services.ec2.model.CreateDhcpOptionsResult; +import com.amazonaws.services.ec2.model.CreateFlowLogsRequest; +import com.amazonaws.services.ec2.model.CreateFlowLogsResult; +import com.amazonaws.services.ec2.model.CreateImageRequest; +import com.amazonaws.services.ec2.model.CreateImageResult; +import com.amazonaws.services.ec2.model.CreateInstanceExportTaskRequest; +import com.amazonaws.services.ec2.model.CreateInstanceExportTaskResult; +import com.amazonaws.services.ec2.model.CreateInternetGatewayRequest; +import com.amazonaws.services.ec2.model.CreateInternetGatewayResult; +import com.amazonaws.services.ec2.model.CreateKeyPairRequest; +import com.amazonaws.services.ec2.model.CreateKeyPairResult; +import com.amazonaws.services.ec2.model.CreateNetworkAclEntryRequest; +import com.amazonaws.services.ec2.model.CreateNetworkAclRequest; +import com.amazonaws.services.ec2.model.CreateNetworkAclResult; +import com.amazonaws.services.ec2.model.CreateNetworkInterfaceRequest; +import com.amazonaws.services.ec2.model.CreateNetworkInterfaceResult; +import com.amazonaws.services.ec2.model.CreatePlacementGroupRequest; +import com.amazonaws.services.ec2.model.CreateReservedInstancesListingRequest; 
+import com.amazonaws.services.ec2.model.CreateReservedInstancesListingResult; +import com.amazonaws.services.ec2.model.CreateRouteRequest; +import com.amazonaws.services.ec2.model.CreateRouteResult; +import com.amazonaws.services.ec2.model.CreateRouteTableRequest; +import com.amazonaws.services.ec2.model.CreateRouteTableResult; +import com.amazonaws.services.ec2.model.CreateSecurityGroupRequest; +import com.amazonaws.services.ec2.model.CreateSecurityGroupResult; +import com.amazonaws.services.ec2.model.CreateSnapshotRequest; +import com.amazonaws.services.ec2.model.CreateSnapshotResult; +import com.amazonaws.services.ec2.model.CreateSpotDatafeedSubscriptionRequest; +import com.amazonaws.services.ec2.model.CreateSpotDatafeedSubscriptionResult; +import com.amazonaws.services.ec2.model.CreateSubnetRequest; +import com.amazonaws.services.ec2.model.CreateSubnetResult; +import com.amazonaws.services.ec2.model.CreateTagsRequest; +import com.amazonaws.services.ec2.model.CreateVolumeRequest; +import com.amazonaws.services.ec2.model.CreateVolumeResult; +import com.amazonaws.services.ec2.model.CreateVpcEndpointRequest; +import com.amazonaws.services.ec2.model.CreateVpcEndpointResult; +import com.amazonaws.services.ec2.model.CreateVpcPeeringConnectionRequest; +import com.amazonaws.services.ec2.model.CreateVpcPeeringConnectionResult; +import com.amazonaws.services.ec2.model.CreateVpcRequest; +import com.amazonaws.services.ec2.model.CreateVpcResult; +import com.amazonaws.services.ec2.model.CreateVpnConnectionRequest; +import com.amazonaws.services.ec2.model.CreateVpnConnectionResult; +import com.amazonaws.services.ec2.model.CreateVpnConnectionRouteRequest; +import com.amazonaws.services.ec2.model.CreateVpnGatewayRequest; +import com.amazonaws.services.ec2.model.CreateVpnGatewayResult; +import com.amazonaws.services.ec2.model.DeleteCustomerGatewayRequest; +import com.amazonaws.services.ec2.model.DeleteDhcpOptionsRequest; +import com.amazonaws.services.ec2.model.DeleteFlowLogsRequest; +import com.amazonaws.services.ec2.model.DeleteFlowLogsResult; +import com.amazonaws.services.ec2.model.DeleteInternetGatewayRequest; +import com.amazonaws.services.ec2.model.DeleteKeyPairRequest; +import com.amazonaws.services.ec2.model.DeleteNetworkAclEntryRequest; +import com.amazonaws.services.ec2.model.DeleteNetworkAclRequest; +import com.amazonaws.services.ec2.model.DeleteNetworkInterfaceRequest; +import com.amazonaws.services.ec2.model.DeletePlacementGroupRequest; +import com.amazonaws.services.ec2.model.DeleteRouteRequest; +import com.amazonaws.services.ec2.model.DeleteRouteTableRequest; +import com.amazonaws.services.ec2.model.DeleteSecurityGroupRequest; +import com.amazonaws.services.ec2.model.DeleteSnapshotRequest; +import com.amazonaws.services.ec2.model.DeleteSpotDatafeedSubscriptionRequest; +import com.amazonaws.services.ec2.model.DeleteSubnetRequest; +import com.amazonaws.services.ec2.model.DeleteTagsRequest; +import com.amazonaws.services.ec2.model.DeleteVolumeRequest; +import com.amazonaws.services.ec2.model.DeleteVpcEndpointsRequest; +import com.amazonaws.services.ec2.model.DeleteVpcEndpointsResult; +import com.amazonaws.services.ec2.model.DeleteVpcPeeringConnectionRequest; +import com.amazonaws.services.ec2.model.DeleteVpcPeeringConnectionResult; +import com.amazonaws.services.ec2.model.DeleteVpcRequest; +import com.amazonaws.services.ec2.model.DeleteVpnConnectionRequest; +import com.amazonaws.services.ec2.model.DeleteVpnConnectionRouteRequest; +import 
com.amazonaws.services.ec2.model.DeleteVpnGatewayRequest; +import com.amazonaws.services.ec2.model.DeregisterImageRequest; +import com.amazonaws.services.ec2.model.DescribeAccountAttributesRequest; +import com.amazonaws.services.ec2.model.DescribeAccountAttributesResult; +import com.amazonaws.services.ec2.model.DescribeAddressesRequest; +import com.amazonaws.services.ec2.model.DescribeAddressesResult; +import com.amazonaws.services.ec2.model.DescribeAvailabilityZonesRequest; +import com.amazonaws.services.ec2.model.DescribeAvailabilityZonesResult; +import com.amazonaws.services.ec2.model.DescribeBundleTasksRequest; +import com.amazonaws.services.ec2.model.DescribeBundleTasksResult; +import com.amazonaws.services.ec2.model.DescribeClassicLinkInstancesRequest; +import com.amazonaws.services.ec2.model.DescribeClassicLinkInstancesResult; +import com.amazonaws.services.ec2.model.DescribeConversionTasksRequest; +import com.amazonaws.services.ec2.model.DescribeConversionTasksResult; +import com.amazonaws.services.ec2.model.DescribeCustomerGatewaysRequest; +import com.amazonaws.services.ec2.model.DescribeCustomerGatewaysResult; +import com.amazonaws.services.ec2.model.DescribeDhcpOptionsRequest; +import com.amazonaws.services.ec2.model.DescribeDhcpOptionsResult; +import com.amazonaws.services.ec2.model.DescribeExportTasksRequest; +import com.amazonaws.services.ec2.model.DescribeExportTasksResult; +import com.amazonaws.services.ec2.model.DescribeFlowLogsRequest; +import com.amazonaws.services.ec2.model.DescribeFlowLogsResult; +import com.amazonaws.services.ec2.model.DescribeImageAttributeRequest; +import com.amazonaws.services.ec2.model.DescribeImageAttributeResult; +import com.amazonaws.services.ec2.model.DescribeImagesRequest; +import com.amazonaws.services.ec2.model.DescribeImagesResult; +import com.amazonaws.services.ec2.model.DescribeImportImageTasksRequest; +import com.amazonaws.services.ec2.model.DescribeImportImageTasksResult; +import com.amazonaws.services.ec2.model.DescribeImportSnapshotTasksRequest; +import com.amazonaws.services.ec2.model.DescribeImportSnapshotTasksResult; +import com.amazonaws.services.ec2.model.DescribeInstanceAttributeRequest; +import com.amazonaws.services.ec2.model.DescribeInstanceAttributeResult; +import com.amazonaws.services.ec2.model.DescribeInstanceStatusRequest; +import com.amazonaws.services.ec2.model.DescribeInstanceStatusResult; +import com.amazonaws.services.ec2.model.DescribeInstancesRequest; +import com.amazonaws.services.ec2.model.DescribeInstancesResult; +import com.amazonaws.services.ec2.model.DescribeInternetGatewaysRequest; +import com.amazonaws.services.ec2.model.DescribeInternetGatewaysResult; +import com.amazonaws.services.ec2.model.DescribeKeyPairsRequest; +import com.amazonaws.services.ec2.model.DescribeKeyPairsResult; +import com.amazonaws.services.ec2.model.DescribeMovingAddressesRequest; +import com.amazonaws.services.ec2.model.DescribeMovingAddressesResult; +import com.amazonaws.services.ec2.model.DescribeNetworkAclsRequest; +import com.amazonaws.services.ec2.model.DescribeNetworkAclsResult; +import com.amazonaws.services.ec2.model.DescribeNetworkInterfaceAttributeRequest; +import com.amazonaws.services.ec2.model.DescribeNetworkInterfaceAttributeResult; +import com.amazonaws.services.ec2.model.DescribeNetworkInterfacesRequest; +import com.amazonaws.services.ec2.model.DescribeNetworkInterfacesResult; +import com.amazonaws.services.ec2.model.DescribePlacementGroupsRequest; +import 
com.amazonaws.services.ec2.model.DescribePlacementGroupsResult; +import com.amazonaws.services.ec2.model.DescribePrefixListsRequest; +import com.amazonaws.services.ec2.model.DescribePrefixListsResult; +import com.amazonaws.services.ec2.model.DescribeRegionsRequest; +import com.amazonaws.services.ec2.model.DescribeRegionsResult; +import com.amazonaws.services.ec2.model.DescribeReservedInstancesListingsRequest; +import com.amazonaws.services.ec2.model.DescribeReservedInstancesListingsResult; +import com.amazonaws.services.ec2.model.DescribeReservedInstancesModificationsRequest; +import com.amazonaws.services.ec2.model.DescribeReservedInstancesModificationsResult; +import com.amazonaws.services.ec2.model.DescribeReservedInstancesOfferingsRequest; +import com.amazonaws.services.ec2.model.DescribeReservedInstancesOfferingsResult; +import com.amazonaws.services.ec2.model.DescribeReservedInstancesRequest; +import com.amazonaws.services.ec2.model.DescribeReservedInstancesResult; +import com.amazonaws.services.ec2.model.DescribeRouteTablesRequest; +import com.amazonaws.services.ec2.model.DescribeRouteTablesResult; +import com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest; +import com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult; +import com.amazonaws.services.ec2.model.DescribeSnapshotAttributeRequest; +import com.amazonaws.services.ec2.model.DescribeSnapshotAttributeResult; +import com.amazonaws.services.ec2.model.DescribeSnapshotsRequest; +import com.amazonaws.services.ec2.model.DescribeSnapshotsResult; +import com.amazonaws.services.ec2.model.DescribeSpotDatafeedSubscriptionRequest; +import com.amazonaws.services.ec2.model.DescribeSpotDatafeedSubscriptionResult; +import com.amazonaws.services.ec2.model.DescribeSpotFleetInstancesRequest; +import com.amazonaws.services.ec2.model.DescribeSpotFleetInstancesResult; +import com.amazonaws.services.ec2.model.DescribeSpotFleetRequestHistoryRequest; +import com.amazonaws.services.ec2.model.DescribeSpotFleetRequestHistoryResult; +import com.amazonaws.services.ec2.model.DescribeSpotFleetRequestsRequest; +import com.amazonaws.services.ec2.model.DescribeSpotFleetRequestsResult; +import com.amazonaws.services.ec2.model.DescribeSpotInstanceRequestsRequest; +import com.amazonaws.services.ec2.model.DescribeSpotInstanceRequestsResult; +import com.amazonaws.services.ec2.model.DescribeSpotPriceHistoryRequest; +import com.amazonaws.services.ec2.model.DescribeSpotPriceHistoryResult; +import com.amazonaws.services.ec2.model.DescribeSubnetsRequest; +import com.amazonaws.services.ec2.model.DescribeSubnetsResult; +import com.amazonaws.services.ec2.model.DescribeTagsRequest; +import com.amazonaws.services.ec2.model.DescribeTagsResult; +import com.amazonaws.services.ec2.model.DescribeVolumeAttributeRequest; +import com.amazonaws.services.ec2.model.DescribeVolumeAttributeResult; +import com.amazonaws.services.ec2.model.DescribeVolumeStatusRequest; +import com.amazonaws.services.ec2.model.DescribeVolumeStatusResult; +import com.amazonaws.services.ec2.model.DescribeVolumesRequest; +import com.amazonaws.services.ec2.model.DescribeVolumesResult; +import com.amazonaws.services.ec2.model.DescribeVpcAttributeRequest; +import com.amazonaws.services.ec2.model.DescribeVpcAttributeResult; +import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkRequest; +import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkResult; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointServicesRequest; +import 
com.amazonaws.services.ec2.model.DescribeVpcEndpointServicesResult; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointsRequest; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointsResult; +import com.amazonaws.services.ec2.model.DescribeVpcPeeringConnectionsRequest; +import com.amazonaws.services.ec2.model.DescribeVpcPeeringConnectionsResult; +import com.amazonaws.services.ec2.model.DescribeVpcsRequest; +import com.amazonaws.services.ec2.model.DescribeVpcsResult; +import com.amazonaws.services.ec2.model.DescribeVpnConnectionsRequest; +import com.amazonaws.services.ec2.model.DescribeVpnConnectionsResult; +import com.amazonaws.services.ec2.model.DescribeVpnGatewaysRequest; +import com.amazonaws.services.ec2.model.DescribeVpnGatewaysResult; +import com.amazonaws.services.ec2.model.DetachClassicLinkVpcRequest; +import com.amazonaws.services.ec2.model.DetachClassicLinkVpcResult; +import com.amazonaws.services.ec2.model.DetachInternetGatewayRequest; +import com.amazonaws.services.ec2.model.DetachNetworkInterfaceRequest; +import com.amazonaws.services.ec2.model.DetachVolumeRequest; +import com.amazonaws.services.ec2.model.DetachVolumeResult; +import com.amazonaws.services.ec2.model.DetachVpnGatewayRequest; +import com.amazonaws.services.ec2.model.DisableVgwRoutePropagationRequest; +import com.amazonaws.services.ec2.model.DisableVpcClassicLinkRequest; +import com.amazonaws.services.ec2.model.DisableVpcClassicLinkResult; +import com.amazonaws.services.ec2.model.DisassociateAddressRequest; +import com.amazonaws.services.ec2.model.DisassociateRouteTableRequest; +import com.amazonaws.services.ec2.model.DryRunResult; +import com.amazonaws.services.ec2.model.DryRunSupportedRequest; +import com.amazonaws.services.ec2.model.EnableVgwRoutePropagationRequest; +import com.amazonaws.services.ec2.model.EnableVolumeIORequest; +import com.amazonaws.services.ec2.model.EnableVpcClassicLinkRequest; +import com.amazonaws.services.ec2.model.EnableVpcClassicLinkResult; +import com.amazonaws.services.ec2.model.Filter; +import com.amazonaws.services.ec2.model.GetConsoleOutputRequest; +import com.amazonaws.services.ec2.model.GetConsoleOutputResult; +import com.amazonaws.services.ec2.model.GetPasswordDataRequest; +import com.amazonaws.services.ec2.model.GetPasswordDataResult; +import com.amazonaws.services.ec2.model.ImportImageRequest; +import com.amazonaws.services.ec2.model.ImportImageResult; +import com.amazonaws.services.ec2.model.ImportInstanceRequest; +import com.amazonaws.services.ec2.model.ImportInstanceResult; +import com.amazonaws.services.ec2.model.ImportKeyPairRequest; +import com.amazonaws.services.ec2.model.ImportKeyPairResult; +import com.amazonaws.services.ec2.model.ImportSnapshotRequest; +import com.amazonaws.services.ec2.model.ImportSnapshotResult; +import com.amazonaws.services.ec2.model.ImportVolumeRequest; +import com.amazonaws.services.ec2.model.ImportVolumeResult; +import com.amazonaws.services.ec2.model.Instance; +import com.amazonaws.services.ec2.model.InstanceState; +import com.amazonaws.services.ec2.model.InstanceStateName; +import com.amazonaws.services.ec2.model.ModifyImageAttributeRequest; +import com.amazonaws.services.ec2.model.ModifyInstanceAttributeRequest; +import com.amazonaws.services.ec2.model.ModifyNetworkInterfaceAttributeRequest; +import com.amazonaws.services.ec2.model.ModifyReservedInstancesRequest; +import com.amazonaws.services.ec2.model.ModifyReservedInstancesResult; +import com.amazonaws.services.ec2.model.ModifySnapshotAttributeRequest; +import 
com.amazonaws.services.ec2.model.ModifySpotFleetRequestRequest; +import com.amazonaws.services.ec2.model.ModifySpotFleetRequestResult; +import com.amazonaws.services.ec2.model.ModifySubnetAttributeRequest; +import com.amazonaws.services.ec2.model.ModifyVolumeAttributeRequest; +import com.amazonaws.services.ec2.model.ModifyVpcAttributeRequest; +import com.amazonaws.services.ec2.model.ModifyVpcEndpointRequest; +import com.amazonaws.services.ec2.model.ModifyVpcEndpointResult; +import com.amazonaws.services.ec2.model.MonitorInstancesRequest; +import com.amazonaws.services.ec2.model.MonitorInstancesResult; +import com.amazonaws.services.ec2.model.MoveAddressToVpcRequest; +import com.amazonaws.services.ec2.model.MoveAddressToVpcResult; +import com.amazonaws.services.ec2.model.PurchaseReservedInstancesOfferingRequest; +import com.amazonaws.services.ec2.model.PurchaseReservedInstancesOfferingResult; +import com.amazonaws.services.ec2.model.RebootInstancesRequest; +import com.amazonaws.services.ec2.model.RegisterImageRequest; +import com.amazonaws.services.ec2.model.RegisterImageResult; +import com.amazonaws.services.ec2.model.RejectVpcPeeringConnectionRequest; +import com.amazonaws.services.ec2.model.RejectVpcPeeringConnectionResult; +import com.amazonaws.services.ec2.model.ReleaseAddressRequest; +import com.amazonaws.services.ec2.model.ReplaceNetworkAclAssociationRequest; +import com.amazonaws.services.ec2.model.ReplaceNetworkAclAssociationResult; +import com.amazonaws.services.ec2.model.ReplaceNetworkAclEntryRequest; +import com.amazonaws.services.ec2.model.ReplaceRouteRequest; +import com.amazonaws.services.ec2.model.ReplaceRouteTableAssociationRequest; +import com.amazonaws.services.ec2.model.ReplaceRouteTableAssociationResult; +import com.amazonaws.services.ec2.model.ReportInstanceStatusRequest; +import com.amazonaws.services.ec2.model.RequestSpotFleetRequest; +import com.amazonaws.services.ec2.model.RequestSpotFleetResult; +import com.amazonaws.services.ec2.model.RequestSpotInstancesRequest; +import com.amazonaws.services.ec2.model.RequestSpotInstancesResult; +import com.amazonaws.services.ec2.model.Reservation; +import com.amazonaws.services.ec2.model.ResetImageAttributeRequest; +import com.amazonaws.services.ec2.model.ResetInstanceAttributeRequest; +import com.amazonaws.services.ec2.model.ResetNetworkInterfaceAttributeRequest; +import com.amazonaws.services.ec2.model.ResetSnapshotAttributeRequest; +import com.amazonaws.services.ec2.model.RestoreAddressToClassicRequest; +import com.amazonaws.services.ec2.model.RestoreAddressToClassicResult; +import com.amazonaws.services.ec2.model.RevokeSecurityGroupEgressRequest; +import com.amazonaws.services.ec2.model.RevokeSecurityGroupIngressRequest; +import com.amazonaws.services.ec2.model.RunInstancesRequest; +import com.amazonaws.services.ec2.model.RunInstancesResult; +import com.amazonaws.services.ec2.model.StartInstancesRequest; +import com.amazonaws.services.ec2.model.StartInstancesResult; +import com.amazonaws.services.ec2.model.StopInstancesRequest; +import com.amazonaws.services.ec2.model.StopInstancesResult; +import com.amazonaws.services.ec2.model.Tag; +import com.amazonaws.services.ec2.model.TerminateInstancesRequest; +import com.amazonaws.services.ec2.model.TerminateInstancesResult; +import com.amazonaws.services.ec2.model.UnassignPrivateIpAddressesRequest; +import com.amazonaws.services.ec2.model.UnmonitorInstancesRequest; +import com.amazonaws.services.ec2.model.UnmonitorInstancesResult; import org.elasticsearch.common.logging.ESLogger; 
import org.elasticsearch.common.logging.ESLoggerFactory; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -508,12 +826,12 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeVolumesResult describeVolumes(DescribeVolumesRequest describeVolumesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesListingsResult describeReservedInstancesListings(DescribeReservedInstancesListingsRequest describeReservedInstancesListingsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -523,47 +841,47 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeRouteTablesResult describeRouteTables(DescribeRouteTablesRequest describeRouteTablesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeDhcpOptionsResult describeDhcpOptions(DescribeDhcpOptionsRequest describeDhcpOptionsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public MonitorInstancesResult monitorInstances(MonitorInstancesRequest monitorInstancesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribePrefixListsResult describePrefixLists(DescribePrefixListsRequest describePrefixListsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public RequestSpotFleetResult requestSpotFleet(RequestSpotFleetRequest requestSpotFleetRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImportImageTasksResult describeImportImageTasks(DescribeImportImageTasksRequest describeImportImageTasksRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeNetworkAclsResult describeNetworkAcls(DescribeNetworkAclsRequest describeNetworkAclsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeBundleTasksResult describeBundleTasks(DescribeBundleTasksRequest describeBundleTasksRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not 
supported in mock"); } @Override public ImportInstanceResult importInstance(ImportInstanceRequest importInstanceRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -573,17 +891,17 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DeleteVpcPeeringConnectionResult deleteVpcPeeringConnection(DeleteVpcPeeringConnectionRequest deleteVpcPeeringConnectionRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public GetConsoleOutputResult getConsoleOutput(GetConsoleOutputRequest getConsoleOutputRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateInternetGatewayResult createInternetGateway(CreateInternetGatewayRequest createInternetGatewayRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -603,12 +921,12 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public CreateCustomerGatewayResult createCustomerGateway(CreateCustomerGatewayRequest createCustomerGatewayRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateSpotDatafeedSubscriptionResult createSpotDatafeedSubscription(CreateSpotDatafeedSubscriptionRequest createSpotDatafeedSubscriptionRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -623,62 +941,62 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeMovingAddressesResult describeMovingAddresses(DescribeMovingAddressesRequest describeMovingAddressesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeConversionTasksResult describeConversionTasks(DescribeConversionTasksRequest describeConversionTasksRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateVpnConnectionResult createVpnConnection(CreateVpnConnectionRequest createVpnConnectionRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public ImportImageResult importImage(ImportImageRequest importImageRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DisableVpcClassicLinkResult disableVpcClassicLink(DisableVpcClassicLinkRequest 
disableVpcClassicLinkRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeInstanceAttributeResult describeInstanceAttribute(DescribeInstanceAttributeRequest describeInstanceAttributeRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeFlowLogsResult describeFlowLogs(DescribeFlowLogsRequest describeFlowLogsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections(DescribeVpcPeeringConnectionsRequest describeVpcPeeringConnectionsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribePlacementGroupsResult describePlacementGroups(DescribePlacementGroupsRequest describePlacementGroupsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public RunInstancesResult runInstances(RunInstancesRequest runInstancesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSubnetsResult describeSubnets(DescribeSubnetsRequest describeSubnetsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public AssociateRouteTableResult associateRouteTable(AssociateRouteTableRequest associateRouteTableRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -693,12 +1011,12 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeImagesResult describeImages(DescribeImagesRequest describeImagesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public StartInstancesResult startInstances(StartInstancesRequest startInstancesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -708,7 +1026,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public CancelReservedInstancesListingResult cancelReservedInstancesListing(CancelReservedInstancesListingRequest cancelReservedInstancesListingRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -723,17 +1041,17 
@@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests(DescribeSpotInstanceRequestsRequest describeSpotInstanceRequestsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateVpcResult createVpc(CreateVpcRequest createVpcRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeCustomerGatewaysResult describeCustomerGateways(DescribeCustomerGatewaysRequest describeCustomerGatewaysRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -743,22 +1061,22 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public CreateRouteResult createRoute(CreateRouteRequest createRouteRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateVpcEndpointResult createVpcEndpoint(CreateVpcEndpointRequest createVpcEndpointRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CopyImageResult copyImage(CopyImageRequest copyImageRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcClassicLinkResult describeVpcClassicLink(DescribeVpcClassicLinkRequest describeVpcClassicLinkRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -773,17 +1091,17 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeNetworkInterfaceAttributeResult describeNetworkInterfaceAttribute(DescribeNetworkInterfaceAttributeRequest describeNetworkInterfaceAttributeRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeClassicLinkInstancesResult describeClassicLinkInstances(DescribeClassicLinkInstancesRequest describeClassicLinkInstancesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public RequestSpotInstancesResult requestSpotInstances(RequestSpotInstancesRequest requestSpotInstancesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -793,12 +1111,12 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeVolumeAttributeResult 
describeVolumeAttribute(DescribeVolumeAttributeRequest describeVolumeAttributeRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public AttachNetworkInterfaceResult attachNetworkInterface(AttachNetworkInterfaceRequest attachNetworkInterfaceRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -808,12 +1126,12 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeTagsResult describeTags(DescribeTagsRequest describeTagsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelBundleTaskResult cancelBundleTask(CancelBundleTaskRequest cancelBundleTaskRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -823,22 +1141,22 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public ImportSnapshotResult importSnapshot(ImportSnapshotRequest importSnapshotRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelSpotInstanceRequestsResult cancelSpotInstanceRequests(CancelSpotInstanceRequestsRequest cancelSpotInstanceRequestsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotFleetRequestsResult describeSpotFleetRequests(DescribeSpotFleetRequestsRequest describeSpotFleetRequestsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public PurchaseReservedInstancesOfferingResult purchaseReservedInstancesOffering(PurchaseReservedInstancesOfferingRequest purchaseReservedInstancesOfferingRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -848,17 +1166,17 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeReservedInstancesModificationsResult describeReservedInstancesModifications(DescribeReservedInstancesModificationsRequest describeReservedInstancesModificationsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public TerminateInstancesResult terminateInstances(TerminateInstancesRequest terminateInstancesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifyVpcEndpointResult modifyVpcEndpoint(ModifyVpcEndpointRequest 
modifyVpcEndpointRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -873,32 +1191,32 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeSnapshotAttributeResult describeSnapshotAttribute(DescribeSnapshotAttributeRequest describeSnapshotAttributeRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public ReplaceRouteTableAssociationResult replaceRouteTableAssociation(ReplaceRouteTableAssociationRequest replaceRouteTableAssociationRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeAddressesResult describeAddresses(DescribeAddressesRequest describeAddressesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImageAttributeResult describeImageAttribute(DescribeImageAttributeRequest describeImageAttributeRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeKeyPairsResult describeKeyPairs(DescribeKeyPairsRequest describeKeyPairsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public ConfirmProductInstanceResult confirmProductInstance(ConfirmProductInstanceRequest confirmProductInstanceRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -908,7 +1226,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeVpcAttributeResult describeVpcAttribute(DescribeVpcAttributeRequest describeVpcAttributeRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -923,27 +1241,27 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public CreateVolumeResult createVolume(CreateVolumeRequest createVolumeRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeInstanceStatusResult describeInstanceStatus(DescribeInstanceStatusRequest describeInstanceStatusRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpnGatewaysResult describeVpnGateways(DescribeVpnGatewaysRequest describeVpnGatewaysRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in 
mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateSubnetResult createSubnet(CreateSubnetRequest createSubnetRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesOfferingsResult describeReservedInstancesOfferings(DescribeReservedInstancesOfferingsRequest describeReservedInstancesOfferingsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -953,7 +1271,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeSpotFleetRequestHistoryResult describeSpotFleetRequestHistory(DescribeSpotFleetRequestHistoryRequest describeSpotFleetRequestHistoryRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -963,7 +1281,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public ReplaceNetworkAclAssociationResult replaceNetworkAclAssociation(ReplaceNetworkAclAssociationRequest replaceNetworkAclAssociationRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -978,7 +1296,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public BundleInstanceResult bundleInstance(BundleInstanceRequest bundleInstanceRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -998,17 +1316,17 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public CopySnapshotResult copySnapshot(CopySnapshotRequest copySnapshotRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcEndpointServicesResult describeVpcEndpointServices(DescribeVpcEndpointServicesRequest describeVpcEndpointServicesRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public AllocateAddressResult allocateAddress(AllocateAddressRequest allocateAddressRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1023,7 +1341,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public CreateKeyPairResult createKeyPair(CreateKeyPairRequest createKeyPairRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1033,17 +1351,17 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeSnapshotsResult describeSnapshots(DescribeSnapshotsRequest describeSnapshotsRequest) throws 
AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateNetworkAclResult createNetworkAcl(CreateNetworkAclRequest createNetworkAclRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public RegisterImageResult registerImage(RegisterImageRequest registerImageRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1053,7 +1371,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public EnableVpcClassicLinkResult enableVpcClassicLink(EnableVpcClassicLinkRequest enableVpcClassicLinkRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1063,137 +1381,137 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeVpcEndpointsResult describeVpcEndpoints(DescribeVpcEndpointsRequest describeVpcEndpointsRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DetachClassicLinkVpcResult detachClassicLinkVpc(DetachClassicLinkVpcRequest detachClassicLinkVpcRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesResult describeReservedInstances() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeAvailabilityZonesResult describeAvailabilityZones() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotPriceHistoryResult describeSpotPriceHistory() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeNetworkInterfacesResult describeNetworkInterfaces() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeRegionsResult describeRegions() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeInternetGatewaysResult describeInternetGateways() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSecurityGroupsResult describeSecurityGroups() 
throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotDatafeedSubscriptionResult describeSpotDatafeedSubscription() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeAccountAttributesResult describeAccountAttributes() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVolumeStatusResult describeVolumeStatus() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImportSnapshotTasksResult describeImportSnapshotTasks() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpnConnectionsResult describeVpnConnections() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcsResult describeVpcs() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public AcceptVpcPeeringConnectionResult acceptVpcPeeringConnection() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeExportTasksResult describeExportTasks() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateVpcPeeringConnectionResult createVpcPeeringConnection() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelImportTaskResult cancelImportTask() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVolumesResult describeVolumes() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesListingsResult describeReservedInstancesListings() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeRouteTablesResult describeRouteTables() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not 
supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeDhcpOptionsResult describeDhcpOptions() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribePrefixListsResult describePrefixLists() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImportImageTasksResult describeImportImageTasks() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeNetworkAclsResult describeNetworkAcls() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeBundleTasksResult describeBundleTasks() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1203,92 +1521,92 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public CreateInternetGatewayResult createInternetGateway() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeMovingAddressesResult describeMovingAddresses() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeConversionTasksResult describeConversionTasks() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public ImportImageResult importImage() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeFlowLogsResult describeFlowLogs() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribePlacementGroupsResult describePlacementGroups() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSubnetsResult describeSubnets() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); 
} @Override public DescribeInstancesResult describeInstances() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImagesResult describeImages() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeCustomerGatewaysResult describeCustomerGateways() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcClassicLinkResult describeVpcClassicLink() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeClassicLinkInstancesResult describeClassicLinkInstances() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeTagsResult describeTags() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public ImportSnapshotResult importSnapshot() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotFleetRequestsResult describeSpotFleetRequests() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesModificationsResult describeReservedInstancesModifications() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1298,52 +1616,52 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeAddressesResult describeAddresses() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeKeyPairsResult describeKeyPairs() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeInstanceStatusResult describeInstanceStatus() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpnGatewaysResult 
describeVpnGateways() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesOfferingsResult describeReservedInstancesOfferings() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcEndpointServicesResult describeVpcEndpointServices() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public AllocateAddressResult allocateAddress() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSnapshotsResult describeSnapshots() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcEndpointsResult describeVpcEndpoints() throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DryRunResult dryRun(DryRunSupportedRequest request) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1353,11 +1671,11 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifySpotFleetRequestResult modifySpotFleetRequest(ModifySpotFleetRequestRequest modifySpotFleetRequestRequest) throws AmazonServiceException, AmazonClientException { - throw new UnsupportedOperationException("Not supported in mock"); + throw new UnsupportedOperationException("Not supported in mock"); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 6f88be2be5a..bea0df9e8d8 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.services.ec2.model.Tag; - import org.elasticsearch.Version; import org.elasticsearch.cloud.aws.AwsEc2Service; import org.elasticsearch.cloud.aws.AwsEc2Service.DISCOVERY_EC2; diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 4e6ade8788f..6f4459ef753 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -31,3 +31,14 @@ test { // this is needed for insecure plugins, remove if possible! 
systemProperty 'tests.artifact', project.name } + +thirdPartyAudit.excludes = [ + // classes are missing + 'com.google.common.base.Splitter', + 'com.google.common.collect.Lists', + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', +] diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java index 07e05f06c6d..76172172bb8 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java @@ -30,9 +30,8 @@ import com.google.api.client.json.jackson2.JacksonFactory; import com.google.api.services.compute.Compute; import com.google.api.services.compute.model.Instance; import com.google.api.services.compute.model.InstanceList; - -import org.elasticsearch.SpecialPermission; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.gce.network.GceNameResolver; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -48,7 +47,11 @@ import java.security.GeneralSecurityException; import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; public class GceComputeServiceImpl extends AbstractLifecycleComponent implements GceComputeService { diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java index f20d1c74f83..fe87b9244d4 100755 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java @@ -22,12 +22,12 @@ package org.elasticsearch.discovery.gce; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPingService; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -40,10 +40,10 @@ public class GceDiscovery extends ZenDiscovery { @Inject public GceDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, NodeSettingsService nodeSettingsService, ZenPingService pingService, + ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService, DiscoverySettings discoverySettings, ElectMasterService electMasterService) { - super(settings, clusterName, threadPool, transportService, clusterService, nodeSettingsService, + super(settings, clusterName, threadPool, 
transportService, clusterService, clusterSettings, pingService, electMasterService, discoverySettings); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index 476773dcc73..8ea93825bd1 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -22,7 +22,6 @@ package org.elasticsearch.discovery.gce; import com.google.api.services.compute.model.AccessConfig; import com.google.api.services.compute.model.Instance; import com.google.api.services.compute.model.NetworkInterface; - import org.elasticsearch.Version; import org.elasticsearch.cloud.gce.GceComputeService; import org.elasticsearch.cluster.node.DiscoveryNode; diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java index 1d73e1d540e..22d759fc2dd 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java @@ -21,10 +21,14 @@ package org.elasticsearch.discovery.gce; import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.googleapis.testing.auth.oauth2.MockGoogleCredential; -import com.google.api.client.http.*; +import com.google.api.client.http.HttpBackOffIOExceptionHandler; +import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler; +import com.google.api.client.http.HttpRequest; +import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpResponse; +import com.google.api.client.http.HttpUnsuccessfulResponseHandler; import com.google.api.client.util.ExponentialBackOff; import com.google.api.client.util.Sleeper; - import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; @@ -65,7 +69,7 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer { this.sleeper = sleeper; this.maxWait = maxWait; } - + // Use only for testing static MockGoogleCredential.Builder newMockCredentialBuilder() { // TODO: figure out why GCE is so bad like this diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index 5f01a98a5f2..5bb5e27ce64 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -21,7 +21,6 @@ package org.elasticsearch.plugin.discovery.gce; import com.google.api.client.http.HttpHeaders; import com.google.api.client.util.ClassInfo; - import org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.gce.GceComputeService; import org.elasticsearch.cloud.gce.GceModule; diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java index 
ef92bd74305..9e48bc7d3df 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java @@ -34,7 +34,6 @@ import com.google.api.client.testing.http.MockLowLevelHttpRequest; import com.google.api.client.testing.http.MockLowLevelHttpResponse; import com.google.api.client.testing.util.MockSleeper; import com.google.api.services.compute.Compute; - import org.elasticsearch.test.ESTestCase; import java.io.IOException; diff --git a/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryPlugin.java b/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryPlugin.java index f0a734372fd..0d3c945ee2f 100644 --- a/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryPlugin.java +++ b/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryPlugin.java @@ -19,14 +19,10 @@ package org.elasticsearch.plugin.discovery.multicast; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.plugin.discovery.multicast.MulticastZenPing; import org.elasticsearch.plugins.Plugin; -import java.util.Collection; - public class MulticastDiscoveryPlugin extends Plugin { private final Settings settings; @@ -44,7 +40,7 @@ public class MulticastDiscoveryPlugin extends Plugin { public String description() { return "Multicast Discovery Plugin"; } - + public void onModule(DiscoveryModule module) { if (settings.getAsBoolean("discovery.zen.ping.multicast.enabled", false)) { module.addZenPing(MulticastZenPing.class); diff --git a/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPing.java b/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPing.java index f28bc08e9a6..82bf1bf088c 100644 --- a/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPing.java +++ b/plugins/discovery-multicast/src/main/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPing.java @@ -20,7 +20,6 @@ package org.elasticsearch.plugin.discovery.multicast; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.lucene.util.Constants; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.SpecialPermission; @@ -149,8 +148,8 @@ public class MulticastZenPing extends AbstractLifecycleComponent implem // may not even be bound to an interface on this machine! use the first bound address. List addresses = Arrays.asList(networkService.resolveBindHostAddresses(address == null ? 
null : new String[] { address })); NetworkUtils.sortAddresses(addresses); - - final MulticastChannel.Config config = new MulticastChannel.Config(port, group, bufferSize, ttl, + + final MulticastChannel.Config config = new MulticastChannel.Config(port, group, bufferSize, ttl, addresses.get(0), deferToInterface); SecurityManager sm = System.getSecurityManager(); if (sm != null) { diff --git a/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPingTests.java b/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPingTests.java index ba673127f4f..8c2d95ec799 100644 --- a/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPingTests.java +++ b/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastZenPingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.plugin.discovery.multicast; +import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -45,6 +46,7 @@ import java.net.InetAddress; import java.net.MulticastSocket; public class MulticastZenPingTests extends ESTestCase { + private Settings buildRandomMulticast(Settings settings) { Settings.Builder builder = Settings.builder().put(settings); builder.put("discovery.zen.ping.multicast.group", "224.2.3." + randomIntBetween(0, 255)); @@ -57,6 +59,7 @@ public class MulticastZenPingTests extends ESTestCase { } public void testSimplePings() throws InterruptedException { + assumeTrue("https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=193246", Constants.FREE_BSD == false); Settings settings = Settings.EMPTY; settings = buildRandomMulticast(settings); Thread.sleep(30000); @@ -129,8 +132,16 @@ public class MulticastZenPingTests extends ESTestCase { } } + // This test is here because when running on FreeBSD, if no tests are + // executed for the 'multicast' project it will assume everything + // failed, so we need to have at least one test that runs. + public void testAlwaysRun() throws Exception { + assertTrue(true); + } + @SuppressForbidden(reason = "I bind to wildcard addresses. 
I am a total nightmare") public void testExternalPing() throws Exception { + assumeTrue("https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=193246", Constants.FREE_BSD == false); Settings settings = Settings.EMPTY; settings = buildRandomMulticast(settings); diff --git a/plugins/jvm-example/build.gradle b/plugins/jvm-example/build.gradle index d8440eaecad..f0dd69ff8c4 100644 --- a/plugins/jvm-example/build.gradle +++ b/plugins/jvm-example/build.gradle @@ -27,3 +27,24 @@ test.enabled = false compileJava.options.compilerArgs << "-Xlint:-rawtypes" +configurations { + exampleFixture +} + +dependencies { + exampleFixture project(':test:fixtures:example-fixture') +} + +task exampleFixture(type: org.elasticsearch.gradle.test.Fixture) { + dependsOn project.configurations.exampleFixture + executable = new File(project.javaHome, 'bin/java') + args '-cp', "${ -> project.configurations.exampleFixture.asPath }", + 'example.ExampleTestFixture', + baseDir +} + +integTest { + dependsOn exampleFixture + systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }" +} + diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java index 9d1c36a90dd..d5e0a62ecb5 100644 --- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java +++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java @@ -22,7 +22,10 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.cat.AbstractCatAction; import org.elasticsearch.rest.action.support.RestTable; diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java index 9dd9cb740ed..c1bcc65bfe2 100644 --- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java +++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java @@ -28,7 +28,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.rest.action.cat.AbstractCatAction; -import java.io.Closeable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java new file mode 100644 index 00000000000..1f48549aad4 --- /dev/null +++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java
index 9d1c36a90dd..d5e0a62ecb5 100644
--- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java
+++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java
@@ -22,7 +22,10 @@ import org.elasticsearch.client.Client;
 import org.elasticsearch.common.Table;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.rest.*;
+import org.elasticsearch.rest.BytesRestResponse;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.cat.AbstractCatAction;
 import org.elasticsearch.rest.action.support.RestTable;
 
diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java
index 9dd9cb740ed..c1bcc65bfe2 100644
--- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java
+++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java
@@ -28,7 +28,6 @@ import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.repositories.RepositoriesModule;
 import org.elasticsearch.rest.action.cat.AbstractCatAction;
 
-import java.io.Closeable;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java
new file mode 100644
index 00000000000..1f48549aad4
--- /dev/null
+++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/ExampleExternalIT.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugin.example;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.net.InetAddress;
+import java.net.Socket;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.util.Objects;
+
+public class ExampleExternalIT extends ESTestCase {
+    public void testExample() throws Exception {
+        String stringAddress = Objects.requireNonNull(System.getProperty("external.address"));
+        URL url = new URL("http://" + stringAddress);
+        InetAddress address = InetAddress.getByName(url.getHost());
+        try (Socket socket = new Socket(address, url.getPort());
+             BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) {
+            assertEquals("TEST", reader.readLine());
+        }
+    }
+}
diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java index 33a4e55801b..eca1265766d 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java @@ -27,14 +27,27 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.script.*; +import org.elasticsearch.script.ClassPermission; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.ScoreAccessor; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.javascript.support.NativeList; import org.elasticsearch.script.javascript.support.NativeMap; import org.elasticsearch.script.javascript.support.ScriptValueConverter; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; -import org.mozilla.javascript.*; +import org.mozilla.javascript.Context; +import org.mozilla.javascript.ContextFactory; +import org.mozilla.javascript.GeneratedClassLoader; +import org.mozilla.javascript.PolicySecurityController; import org.mozilla.javascript.Script; +import org.mozilla.javascript.Scriptable; +import org.mozilla.javascript.ScriptableObject; +import org.mozilla.javascript.SecurityController; +import org.mozilla.javascript.WrapFactory; import java.io.IOException; import java.net.MalformedURLException; @@ -62,7 +75,7 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements // one time initialization of rhino security manager integration private static final CodeSource DOMAIN; private static final int OPTIMIZATION_LEVEL = 1; - + static { try { DOMAIN = new CodeSource(new URL("file:" +
BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); @@ -110,7 +123,7 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements if (securityDomain != DOMAIN) { throw new SecurityException("illegal securityDomain: " + securityDomain); } - + return super.createClassLoader(parent, securityDomain); } });
@@ -157,7 +170,7 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements
     }
 
     @Override
-    public Object compile(String script) {
+    public Object compile(String script, Map params) {
         Context ctx = Context.enter();
         try {
             return ctx.compileString(script, generateScriptName(), 1, DOMAIN);
diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/support/ScriptValueConverter.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/support/ScriptValueConverter.java index f3a39896641..a90948c1877 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/support/ScriptValueConverter.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/support/ScriptValueConverter.java @@ -19,9 +19,19 @@ package org.elasticsearch.script.javascript.support; -import org.mozilla.javascript.*; +import org.mozilla.javascript.Context; +import org.mozilla.javascript.IdScriptableObject; +import org.mozilla.javascript.NativeArray; +import org.mozilla.javascript.ScriptRuntime; +import org.mozilla.javascript.Scriptable; +import org.mozilla.javascript.Wrapper; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * Value Converter to marshal objects between Java and Javascript.
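The compile hunk above is the API change that drives the long run of mechanical test edits below: compile(String) grows a params argument, so every existing call site gains a Collections.emptyMap(). A reduced sketch of the shape of that change (ScriptEngineSketch is hypothetical, and the map's type parameters are an assumption; the diff itself shows only the raw Map):

    import java.util.Collections;
    import java.util.Map;

    // Reduced sketch of the engine API after the change; names are illustrative.
    interface ScriptEngineSketch {
        // before: Object compile(String script);
        Object compile(String script, Map<String, String> params);
    }

    class CompileCallSite {
        // Call sites with no compile-time parameters pass an empty map,
        // matching the Collections.emptyMap() pattern in the test updates below.
        static Object recompile(ScriptEngineSketch engine) {
            return engine.compile("1 + 2", Collections.emptyMap());
        }
    }

Taking parameters at compile time, rather than only at execution time through executable(...), presumably lets an engine specialize what it compiles for a given set of parameters.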
diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineTests.java index fe9cc324f1c..9d8357bb582 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineTests.java @@ -29,6 +29,7 @@ import org.junit.After; import org.junit.Before; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -54,7 +55,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { public void testSimpleEquation() { Map vars = new HashMap(); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testSimpleEquation", "js", se.compile("1 + 2")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testSimpleEquation", "js", se.compile("1 + 2", Collections.emptyMap())), vars).run(); assertThat(((Number) o).intValue(), equalTo(3)); } @@ -64,20 +65,20 @@ public class JavaScriptScriptEngineTests extends ESTestCase { Map obj2 = MapBuilder.newMapBuilder().put("prop2", "value2").map(); Map obj1 = MapBuilder.newMapBuilder().put("prop1", "value1").put("obj2", obj2).put("l", Arrays.asList("2", "1")).map(); vars.put("obj1", obj1); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "js", se.compile("obj1")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "js", se.compile("obj1", Collections.emptyMap())), vars).run(); assertThat(o, instanceOf(Map.class)); obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); - o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "js", se.compile("obj1.l[0]")), vars).run(); + o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "js", se.compile("obj1.l[0]", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("2")); } public void testJavaScriptObjectToMap() { Map vars = new HashMap(); Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testJavaScriptObjectToMap", "js", - se.compile("var obj1 = {}; obj1.prop1 = 'value1'; obj1.obj2 = {}; obj1.obj2.prop2 = 'value2'; obj1")), vars).run(); + se.compile("var obj1 = {}; obj1.prop1 = 'value1'; obj1.obj2 = {}; obj1.obj2.prop2 = 'value2'; obj1", Collections.emptyMap())), vars).run(); Map obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); @@ -92,7 +93,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { vars.put("ctx", ctx); ExecutableScript executable = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testJavaScriptObjectMapInter", "js", - se.compile("ctx.obj2 = {}; ctx.obj2.prop2 = 'value2'; ctx.obj1.prop1 = 'uvalue1'")), vars); + se.compile("ctx.obj2 = {}; ctx.obj2.prop2 = 'value2'; ctx.obj1.prop1 = 'uvalue1'", Collections.emptyMap())), vars); executable.run(); ctx = (Map) executable.unwrap(vars.get("ctx")); assertThat(ctx.containsKey("obj1"), equalTo(true)); @@ -106,7 +107,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { Map doc = new HashMap(); ctx.put("doc", doc); - Object compiled = se.compile("ctx.doc.field1 = ['value1', 'value2']"); + Object compiled = se.compile("ctx.doc.field1 = ['value1', 'value2']", Collections.emptyMap()); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testJavaScriptInnerArrayCreation", "js", compiled), new HashMap()); script.setNextVar("ctx", ctx); @@ -124,21 +125,21 @@ public class JavaScriptScriptEngineTests extends ESTestCase { vars.put("l", Arrays.asList("1", "2", "3", obj1)); Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessInScript", "js", - se.compile("l.length")), vars).run(); + se.compile("l.length", Collections.emptyMap())), vars).run(); assertThat(((Number) o).intValue(), equalTo(4)); o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessInScript", "js", - se.compile("l[0]")), vars).run(); + se.compile("l[0]", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("1")); o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessInScript", "js", - se.compile("l[3]")), vars).run(); + se.compile("l[3]", Collections.emptyMap())), vars).run(); obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessInScript", "js", - se.compile("l[3].prop1")), vars).run(); + se.compile("l[3].prop1", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("value1")); } @@ -146,7 +147,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { Map vars = new HashMap(); Map ctx = new HashMap(); vars.put("ctx", ctx); -
Object compiledScript = se.compile("ctx.value"); + Object compiledScript = se.compile("ctx.value", Collections.emptyMap()); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution1", "js", compiledScript), vars); @@ -161,7 +162,7 @@ public class JavaScriptScriptEngineTests extends ESTestCase { public void testChangingVarsCrossExecution2() { Map vars = new HashMap(); - Object compiledScript = se.compile("value"); + Object compiledScript = se.compile("value", Collections.emptyMap()); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution2", "js", compiledScript), vars); diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java index 2308e666c51..2aa6e13a99f 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -40,7 +41,7 @@ import static org.hamcrest.Matchers.equalTo; public class JavaScriptScriptMultiThreadedTests extends ESTestCase { public void testExecutableNoRuntimeParams() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[50]; @@ -82,7 +83,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { public void testExecutableWithRuntimeParams() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[50]; @@ -124,7 +125,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { public void testExecute() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); Thread[] threads = new Thread[50]; diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java index c6f9805f818..dccc36d1bf7 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase; import org.mozilla.javascript.EcmaError; import 
org.mozilla.javascript.WrappedException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -33,7 +34,7 @@ import java.util.Map; * Tests for the Javascript security permissions */ public class JavaScriptSecurityTests extends ESTestCase { - + private JavaScriptScriptEngineService se; @Override @@ -53,14 +54,14 @@ public class JavaScriptSecurityTests extends ESTestCase { /** runs a script */ private void doTest(String script) { Map vars = new HashMap(); - se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script)), vars).run(); + se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script, Collections.emptyMap())), vars).run(); } - + /** asserts that a script runs without exception */ private void assertSuccess(String script) { doTest(script); } - + /** assert that a security exception is hit */ private void assertFailure(String script, Class exceptionClass) { try { @@ -78,13 +79,13 @@ public class JavaScriptSecurityTests extends ESTestCase { } } } - + /** Test some javascripts that are ok */ public void testOK() { assertSuccess("1 + 2"); assertSuccess("Math.cos(Math.PI)"); } - + /** Test some javascripts that should hit security exception */ public void testNotOK() throws Exception { // sanity check :) diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java index bb7eb31c85d..3445c116057 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -35,7 +36,7 @@ public class SimpleBench { public static void main(String[] args) { JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - Object compiled = se.compile("x + y"); + Object compiled = se.compile("x + y", Collections.emptyMap()); CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled); Map vars = new HashMap(); diff --git a/plugins/lang-plan-a/build.gradle b/plugins/lang-plan-a/build.gradle index 618c094f683..dc0cfca2fa7 100644 --- a/plugins/lang-plan-a/build.gradle +++ b/plugins/lang-plan-a/build.gradle @@ -28,6 +28,11 @@ dependencies { compile 'org.antlr:antlr4-runtime:4.5.1-1' compile 'org.ow2.asm:asm:5.0.4' compile 'org.ow2.asm:asm-commons:5.0.4' + compile 'org.ow2.asm:asm-tree:5.0.4' +} + +dependencyLicenses { + mapping from: /asm-.*/, to: 'asm' } compileJava.options.compilerArgs << '-Xlint:-cast,-fallthrough,-rawtypes' diff --git a/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt b/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt deleted file mode 100644 index afb064f2f26..00000000000 --- a/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt +++ /dev/null @@ -1,26 +0,0 @@ -Copyright (c) 2012 France Télécom -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. 
Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. -3. Neither the name of the copyright holders nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt b/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 b/plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 new file mode 100644 index 00000000000..5822a485a61 --- /dev/null +++ b/plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 @@ -0,0 +1 @@ +396ce0c07ba2b481f25a70195c7c94922f0d1b0b \ No newline at end of file diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java index baa06f45ff8..9788e63c3d7 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java @@ -19,14 +19,16 @@ package org.elasticsearch.plan.a; -import java.util.HashMap; -import java.util.Map; - import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; -import static org.elasticsearch.plan.a.Definition.*; -import static org.elasticsearch.plan.a.PlanAParser.*; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.plan.a.Definition.Cast; +import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; class Adapter { static class StatementMetadata { @@ -226,7 +228,7 @@ class Adapter { return sourceemd; } - + ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) { final ExpressionMetadata sourceemd = expressionMetadata.get(source); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java index a7e2986d633..a20c32965b2 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java @@ -19,6 +19,8 @@ package org.elasticsearch.plan.a; +import 
org.antlr.v4.runtime.ParserRuleContext; + import java.util.ArrayDeque; import java.util.Arrays; import java.util.Deque; @@ -26,11 +28,81 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import org.antlr.v4.runtime.ParserRuleContext; - -import static org.elasticsearch.plan.a.Adapter.*; -import static org.elasticsearch.plan.a.Definition.*; -import static org.elasticsearch.plan.a.PlanAParser.*; +import static org.elasticsearch.plan.a.Adapter.ExpressionMetadata; +import static org.elasticsearch.plan.a.Adapter.ExtNodeMetadata; +import static org.elasticsearch.plan.a.Adapter.ExternalMetadata; +import static org.elasticsearch.plan.a.Adapter.StatementMetadata; +import static org.elasticsearch.plan.a.Adapter.error; +import static org.elasticsearch.plan.a.Definition.Cast; +import static org.elasticsearch.plan.a.Definition.Constructor; +import static org.elasticsearch.plan.a.Definition.Field; +import static org.elasticsearch.plan.a.Definition.Method; +import static org.elasticsearch.plan.a.Definition.Pair; +import static org.elasticsearch.plan.a.Definition.Sort; +import static org.elasticsearch.plan.a.Definition.Struct; +import static org.elasticsearch.plan.a.Definition.Transform; +import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.PlanAParser.ADD; +import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; +import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; +import static org.elasticsearch.plan.a.PlanAParser.AssignmentContext; +import static org.elasticsearch.plan.a.PlanAParser.BWAND; +import static org.elasticsearch.plan.a.PlanAParser.BWOR; +import static org.elasticsearch.plan.a.PlanAParser.BWXOR; +import static org.elasticsearch.plan.a.PlanAParser.BinaryContext; +import static org.elasticsearch.plan.a.PlanAParser.BlockContext; +import static org.elasticsearch.plan.a.PlanAParser.BoolContext; +import static org.elasticsearch.plan.a.PlanAParser.BreakContext; +import static org.elasticsearch.plan.a.PlanAParser.CastContext; +import static org.elasticsearch.plan.a.PlanAParser.CharContext; +import static org.elasticsearch.plan.a.PlanAParser.CompContext; +import static org.elasticsearch.plan.a.PlanAParser.ConditionalContext; +import static org.elasticsearch.plan.a.PlanAParser.ContinueContext; +import static org.elasticsearch.plan.a.PlanAParser.DIV; +import static org.elasticsearch.plan.a.PlanAParser.DeclContext; +import static org.elasticsearch.plan.a.PlanAParser.DeclarationContext; +import static org.elasticsearch.plan.a.PlanAParser.DecltypeContext; +import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; +import static org.elasticsearch.plan.a.PlanAParser.DoContext; +import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; +import static org.elasticsearch.plan.a.PlanAParser.ExprContext; +import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtcallContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtcastContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtdotContext; +import static org.elasticsearch.plan.a.PlanAParser.ExternalContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtnewContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtprecContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtstartContext; +import static 
org.elasticsearch.plan.a.PlanAParser.ExtstringContext; +import static org.elasticsearch.plan.a.PlanAParser.ExttypeContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtvarContext; +import static org.elasticsearch.plan.a.PlanAParser.FalseContext; +import static org.elasticsearch.plan.a.PlanAParser.ForContext; +import static org.elasticsearch.plan.a.PlanAParser.IfContext; +import static org.elasticsearch.plan.a.PlanAParser.IncrementContext; +import static org.elasticsearch.plan.a.PlanAParser.InitializerContext; +import static org.elasticsearch.plan.a.PlanAParser.LSH; +import static org.elasticsearch.plan.a.PlanAParser.MUL; +import static org.elasticsearch.plan.a.PlanAParser.MultipleContext; +import static org.elasticsearch.plan.a.PlanAParser.NullContext; +import static org.elasticsearch.plan.a.PlanAParser.NumericContext; +import static org.elasticsearch.plan.a.PlanAParser.PostincContext; +import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; +import static org.elasticsearch.plan.a.PlanAParser.PreincContext; +import static org.elasticsearch.plan.a.PlanAParser.REM; +import static org.elasticsearch.plan.a.PlanAParser.RSH; +import static org.elasticsearch.plan.a.PlanAParser.ReturnContext; +import static org.elasticsearch.plan.a.PlanAParser.SUB; +import static org.elasticsearch.plan.a.PlanAParser.SingleContext; +import static org.elasticsearch.plan.a.PlanAParser.SourceContext; +import static org.elasticsearch.plan.a.PlanAParser.StatementContext; +import static org.elasticsearch.plan.a.PlanAParser.TrueContext; +import static org.elasticsearch.plan.a.PlanAParser.USH; +import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; +import static org.elasticsearch.plan.a.PlanAParser.WhileContext; class Analyzer extends PlanAParserBaseVisitor { private static class Variable { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java index 6f4a23765b5..4d6936a0d72 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java @@ -19,17 +19,17 @@ package org.elasticsearch.plan.a; +import org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.bootstrap.BootstrapInfo; + import java.net.MalformedURLException; import java.net.URL; import java.security.CodeSource; import java.security.SecureClassLoader; import java.security.cert.Certificate; -import org.antlr.v4.runtime.ANTLRInputStream; -import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.ParserRuleContext; -import org.elasticsearch.bootstrap.BootstrapInfo; - final class Compiler { private static Definition DEFAULT_DEFINITION = new Definition(new Definition()); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java index 2a1eb13408c..bd9b146e41e 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java @@ -24,7 +24,12 @@ import java.lang.reflect.Array; import java.util.List; import java.util.Map; -import static org.elasticsearch.plan.a.Definition.*; +import static org.elasticsearch.plan.a.Definition.Cast; +import static org.elasticsearch.plan.a.Definition.Field; +import static 
org.elasticsearch.plan.a.Definition.Method; +import static org.elasticsearch.plan.a.Definition.Struct; +import static org.elasticsearch.plan.a.Definition.Transform; +import static org.elasticsearch.plan.a.Definition.Type; public class Def { public static Object methodCall(final Object owner, final String name, final Definition definition, @@ -551,7 +556,7 @@ public class Def { throw new ClassCastException("Cannot apply [%] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + public static Object add(final Object left, final Object right) { if (left instanceof String || right instanceof String) { return "" + left + right; @@ -738,7 +743,7 @@ public class Def { throw new ClassCastException("Cannot apply [>>>] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + public static Object and(final Object left, final Object right) { if (left instanceof Boolean && right instanceof Boolean) { return (boolean)left && (boolean)right; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java index 95e3c93a354..4963815f470 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java @@ -19,12 +19,12 @@ package org.elasticsearch.plan.a; * under the License. */ -import java.text.ParseException; - import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.LexerNoViableAltException; import org.antlr.v4.runtime.misc.Interval; +import java.text.ParseException; + class ErrorHandlingLexer extends PlanALexer { public ErrorHandlingLexer(CharStream charStream) { super(charStream); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java index 3fe36034792..5032ae3222a 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java @@ -19,8 +19,6 @@ package org.elasticsearch.plan.a; * under the License. 
*/ -import java.text.ParseException; - import org.antlr.v4.runtime.DefaultErrorStrategy; import org.antlr.v4.runtime.InputMismatchException; import org.antlr.v4.runtime.NoViableAltException; @@ -28,6 +26,8 @@ import org.antlr.v4.runtime.Parser; import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.Token; +import java.text.ParseException; + class ParserErrorStrategy extends DefaultErrorStrategy { @Override public void recover(Parser recognizer, RecognitionException re) { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java index a9e5ff623bf..e35df0102d2 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java @@ -1,16 +1,19 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.plan.a; - import java.util.Set; - -import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.Lexer; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.LexerATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.misc.*; + +import java.util.Set; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class PlanALexer extends Lexer { @@ -20,15 +23,15 @@ class PlanALexer extends Lexer { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, - BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, - MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, - LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, - BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, - AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, - ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, - STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, + WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, + COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, + BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, + MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, + LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, + BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, + AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, + ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, + STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, EXTID=76; public static final int EXT = 1; public static String[] modeNames = { @@ -36,36 +39,36 @@ class PlanALexer extends Lexer { }; public static final 
String[] ruleNames = { - "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", - "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", - "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", - "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", - "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", - "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", + "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", "TYPE", "GENERIC", "ID", "EXTINTEGER", "EXTID" }; private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", - "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", - "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", - "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", - "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", - "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", - "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, + null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", + "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", + "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", + "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", + "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", + "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", + "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, null, null, null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", - "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", - "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", - "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", - "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", 
"AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", "TYPE", "ID", "EXTINTEGER", "EXTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java index 13f61acb495..da9943385c0 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java @@ -1,13 +1,25 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.plan.a; -import org.antlr.v4.runtime.atn.*; + +import org.antlr.v4.runtime.FailedPredicateException; +import org.antlr.v4.runtime.NoViableAltException; +import org.antlr.v4.runtime.Parser; +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.ParserATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.misc.*; -import org.antlr.v4.runtime.tree.*; +import org.antlr.v4.runtime.tree.ParseTreeVisitor; +import org.antlr.v4.runtime.tree.TerminalNode; + import java.util.List; -import java.util.Iterator; -import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class PlanAParser extends Parser { @@ -17,49 +29,49 @@ class PlanAParser extends Parser { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, - BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, - MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, - LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, - BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, - AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, - ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, - STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, + WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, + COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, + BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, + MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, + LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, + BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, + AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, + ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, + STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, EXTID=76; public static final int - RULE_source = 0, RULE_statement = 1, 
RULE_block = 2, RULE_empty = 3, RULE_initializer = 4, - RULE_afterthought = 5, RULE_declaration = 6, RULE_decltype = 7, RULE_declvar = 8, - RULE_expression = 9, RULE_extstart = 10, RULE_extprec = 11, RULE_extcast = 12, - RULE_extbrace = 13, RULE_extdot = 14, RULE_exttype = 15, RULE_extcall = 16, - RULE_extvar = 17, RULE_extfield = 18, RULE_extnew = 19, RULE_extstring = 20, + RULE_source = 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_initializer = 4, + RULE_afterthought = 5, RULE_declaration = 6, RULE_decltype = 7, RULE_declvar = 8, + RULE_expression = 9, RULE_extstart = 10, RULE_extprec = 11, RULE_extcast = 12, + RULE_extbrace = 13, RULE_extdot = 14, RULE_exttype = 15, RULE_extcall = 16, + RULE_extvar = 17, RULE_extfield = 18, RULE_extnew = 19, RULE_extstring = 20, RULE_arguments = 21, RULE_increment = 22; public static final String[] ruleNames = { - "source", "statement", "block", "empty", "initializer", "afterthought", - "declaration", "decltype", "declvar", "expression", "extstart", "extprec", - "extcast", "extbrace", "extdot", "exttype", "extcall", "extvar", "extfield", + "source", "statement", "block", "empty", "initializer", "afterthought", + "declaration", "decltype", "declvar", "expression", "extstart", "extprec", + "extcast", "extbrace", "extdot", "exttype", "extcall", "extvar", "extfield", "extnew", "extstring", "arguments", "increment" }; private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", - "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", - "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", - "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", - "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", - "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", - "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, + null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", + "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", + "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", + "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", + "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", + "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", + "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, null, null, null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", - "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", - "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", - "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", - "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", 
"EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", "TYPE", "ID", "EXTINTEGER", "EXTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -137,7 +149,7 @@ class PlanAParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(47); + setState(47); _errHandler.sync(this); _la = _input.LA(1); do { @@ -147,7 +159,7 @@ class PlanAParser extends Parser { statement(); } } - setState(49); + setState(49); _errHandler.sync(this); _la = _input.LA(1); } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0) ); @@ -171,7 +183,7 @@ class PlanAParser extends Parser { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_statement; } - + public StatementContext() { } public void copyFrom(StatementContext ctx) { super.copyFrom(ctx); @@ -660,7 +672,7 @@ class PlanAParser extends Parser { match(TRY); setState(115); block(); - setState(123); + setState(123); _errHandler.sync(this); _alt = 1; do { @@ -688,7 +700,7 @@ class PlanAParser extends Parser { default: throw new NoViableAltException(this); } - setState(125); + setState(125); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,12,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -748,7 +760,7 @@ class PlanAParser extends Parser { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_block; } - + public BlockContext() { } public void copyFrom(BlockContext ctx) { super.copyFrom(ctx); @@ -1163,7 +1175,7 @@ class PlanAParser extends Parser { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_expression; } - + public ExpressionContext() { } public void copyFrom(ExpressionContext ctx) { super.copyFrom(ctx); @@ -1742,7 +1754,7 @@ class PlanAParser extends Parser { } break; } - } + } } setState(249); _errHandler.sync(this); @@ -2476,7 +2488,7 @@ class PlanAParser extends Parser { case LBRACE: { { - setState(325); + setState(325); _errHandler.sync(this); _alt = 1; do { @@ -2496,7 +2508,7 @@ class PlanAParser extends Parser { default: throw new NoViableAltException(this); } - setState(327); + setState(327); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java index 6b3cd834715..69736f311e6 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java +++ 
b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java @@ -37,15 +37,16 @@ import java.security.AccessController; import java.security.Permissions; import java.security.PrivilegedAction; import java.security.ProtectionDomain; +import java.util.HashMap; import java.util.Map; public class PlanAScriptEngineService extends AbstractComponent implements ScriptEngineService { public static final String NAME = "plan-a"; - // TODO: this should really be per-script since scripts do so many different things? - private static final CompilerSettings compilerSettings = new CompilerSettings(); - - public static final String NUMERIC_OVERFLOW = "plan-a.numeric_overflow"; + // default settings, used unless otherwise specified + private static final CompilerSettings DEFAULT_COMPILER_SETTINGS = new CompilerSettings(); + + public static final String NUMERIC_OVERFLOW = "numeric_overflow"; // TODO: how should custom definitions be specified? private Definition definition = null; @@ -53,7 +54,6 @@ public class PlanAScriptEngineService extends AbstractComponent implements Scrip @Inject public PlanAScriptEngineService(Settings settings) { super(settings); - compilerSettings.setNumericOverflow(settings.getAsBoolean(NUMERIC_OVERFLOW, compilerSettings.getNumericOverflow())); } public void setDefinition(final Definition definition) { @@ -86,7 +86,23 @@ public class PlanAScriptEngineService extends AbstractComponent implements Scrip } @Override - public Object compile(String script) { + public Object compile(String script, Map<String, String> params) { + final CompilerSettings compilerSettings; + if (params.isEmpty()) { + compilerSettings = DEFAULT_COMPILER_SETTINGS; + } else { + // custom settings + compilerSettings = new CompilerSettings(); + Map<String, String> clone = new HashMap<>(params); + String value = clone.remove(NUMERIC_OVERFLOW); + if (value != null) { + // TODO: can we get a real boolean parser in here?
+ compilerSettings.setNumericOverflow(Boolean.parseBoolean(value)); + } + if (!clone.isEmpty()) { + throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + clone); + } + } // check we ourselves are not being called by unprivileged code SecurityManager sm = System.getSecurityManager(); if (sm != null) { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java index 3756e02f8dc..4f3361576c4 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java @@ -34,9 +34,79 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static org.elasticsearch.plan.a.Adapter.*; -import static org.elasticsearch.plan.a.Definition.*; -import static org.elasticsearch.plan.a.PlanAParser.*; +import static org.elasticsearch.plan.a.Adapter.ExpressionMetadata; +import static org.elasticsearch.plan.a.Adapter.ExtNodeMetadata; +import static org.elasticsearch.plan.a.Adapter.ExternalMetadata; +import static org.elasticsearch.plan.a.Adapter.StatementMetadata; +import static org.elasticsearch.plan.a.Adapter.error; +import static org.elasticsearch.plan.a.Definition.Cast; +import static org.elasticsearch.plan.a.Definition.Constructor; +import static org.elasticsearch.plan.a.Definition.Field; +import static org.elasticsearch.plan.a.Definition.Method; +import static org.elasticsearch.plan.a.Definition.Sort; +import static org.elasticsearch.plan.a.Definition.Transform; +import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.PlanAParser.ADD; +import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; +import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; +import static org.elasticsearch.plan.a.PlanAParser.AssignmentContext; +import static org.elasticsearch.plan.a.PlanAParser.BWAND; +import static org.elasticsearch.plan.a.PlanAParser.BWOR; +import static org.elasticsearch.plan.a.PlanAParser.BWXOR; +import static org.elasticsearch.plan.a.PlanAParser.BinaryContext; +import static org.elasticsearch.plan.a.PlanAParser.BlockContext; +import static org.elasticsearch.plan.a.PlanAParser.BoolContext; +import static org.elasticsearch.plan.a.PlanAParser.BreakContext; +import static org.elasticsearch.plan.a.PlanAParser.CastContext; +import static org.elasticsearch.plan.a.PlanAParser.CharContext; +import static org.elasticsearch.plan.a.PlanAParser.CompContext; +import static org.elasticsearch.plan.a.PlanAParser.ConditionalContext; +import static org.elasticsearch.plan.a.PlanAParser.ContinueContext; +import static org.elasticsearch.plan.a.PlanAParser.DIV; +import static org.elasticsearch.plan.a.PlanAParser.DeclContext; +import static org.elasticsearch.plan.a.PlanAParser.DeclarationContext; +import static org.elasticsearch.plan.a.PlanAParser.DecltypeContext; +import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; +import static org.elasticsearch.plan.a.PlanAParser.DoContext; +import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; +import static org.elasticsearch.plan.a.PlanAParser.ExprContext; +import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtcallContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtcastContext; +import static 
org.elasticsearch.plan.a.PlanAParser.ExtdotContext; +import static org.elasticsearch.plan.a.PlanAParser.ExternalContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtnewContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtprecContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtstartContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtstringContext; +import static org.elasticsearch.plan.a.PlanAParser.ExttypeContext; +import static org.elasticsearch.plan.a.PlanAParser.ExtvarContext; +import static org.elasticsearch.plan.a.PlanAParser.FalseContext; +import static org.elasticsearch.plan.a.PlanAParser.ForContext; +import static org.elasticsearch.plan.a.PlanAParser.IfContext; +import static org.elasticsearch.plan.a.PlanAParser.IncrementContext; +import static org.elasticsearch.plan.a.PlanAParser.InitializerContext; +import static org.elasticsearch.plan.a.PlanAParser.LSH; +import static org.elasticsearch.plan.a.PlanAParser.MUL; +import static org.elasticsearch.plan.a.PlanAParser.MultipleContext; +import static org.elasticsearch.plan.a.PlanAParser.NullContext; +import static org.elasticsearch.plan.a.PlanAParser.NumericContext; +import static org.elasticsearch.plan.a.PlanAParser.PostincContext; +import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; +import static org.elasticsearch.plan.a.PlanAParser.PreincContext; +import static org.elasticsearch.plan.a.PlanAParser.REM; +import static org.elasticsearch.plan.a.PlanAParser.RSH; +import static org.elasticsearch.plan.a.PlanAParser.ReturnContext; +import static org.elasticsearch.plan.a.PlanAParser.SUB; +import static org.elasticsearch.plan.a.PlanAParser.SingleContext; +import static org.elasticsearch.plan.a.PlanAParser.SourceContext; +import static org.elasticsearch.plan.a.PlanAParser.StatementContext; +import static org.elasticsearch.plan.a.PlanAParser.TrueContext; +import static org.elasticsearch.plan.a.PlanAParser.USH; +import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; +import static org.elasticsearch.plan.a.PlanAParser.WhileContext; class Writer extends PlanAParserBaseVisitor { private static class Branch { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java index af7eb25a6c0..d6e05f973a2 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java @@ -19,11 +19,6 @@ package org.elasticsearch.plan.a; -import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; -import java.util.HashMap; -import java.util.Map; - /** Tests for addition operator across all types */ //TODO: NaN/Inf/overflow/... 
public class AdditionTests extends ScriptTestCase { @@ -40,7 +35,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;")); assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;")); } - + public void testIntConst() throws Exception { assertEquals(1+1, exec("return 1+1;")); assertEquals(1+2, exec("return 1+2;")); @@ -52,7 +47,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals(1+0, exec("return 1+0;")); assertEquals(0+0, exec("return 0+0;")); } - + public void testByte() throws Exception { assertEquals((byte)1+(byte)1, exec("byte x = 1; byte y = 1; return x+y;")); assertEquals((byte)1+(byte)2, exec("byte x = 1; byte y = 2; return x+y;")); @@ -64,7 +59,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((byte)1+(byte)0, exec("byte x = 1; byte y = 0; return x+y;")); assertEquals((byte)0+(byte)0, exec("byte x = 0; byte y = 0; return x+y;")); } - + public void testByteConst() throws Exception { assertEquals((byte)1+(byte)1, exec("return (byte)1+(byte)1;")); assertEquals((byte)1+(byte)2, exec("return (byte)1+(byte)2;")); @@ -76,7 +71,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((byte)1+(byte)0, exec("return (byte)1+(byte)0;")); assertEquals((byte)0+(byte)0, exec("return (byte)0+(byte)0;")); } - + public void testChar() throws Exception { assertEquals((char)1+(char)1, exec("char x = 1; char y = 1; return x+y;")); assertEquals((char)1+(char)2, exec("char x = 1; char y = 2; return x+y;")); @@ -88,7 +83,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((char)1+(char)0, exec("char x = 1; char y = 0; return x+y;")); assertEquals((char)0+(char)0, exec("char x = 0; char y = 0; return x+y;")); } - + public void testCharConst() throws Exception { assertEquals((char)1+(char)1, exec("return (char)1+(char)1;")); assertEquals((char)1+(char)2, exec("return (char)1+(char)2;")); @@ -100,7 +95,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((char)1+(char)0, exec("return (char)1+(char)0;")); assertEquals((char)0+(char)0, exec("return (char)0+(char)0;")); } - + public void testShort() throws Exception { assertEquals((short)1+(short)1, exec("short x = 1; short y = 1; return x+y;")); assertEquals((short)1+(short)2, exec("short x = 1; short y = 2; return x+y;")); @@ -112,7 +107,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((short)1+(short)0, exec("short x = 1; short y = 0; return x+y;")); assertEquals((short)0+(short)0, exec("short x = 0; short y = 0; return x+y;")); } - + public void testShortConst() throws Exception { assertEquals((short)1+(short)1, exec("return (short)1+(short)1;")); assertEquals((short)1+(short)2, exec("return (short)1+(short)2;")); @@ -124,7 +119,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals((short)1+(short)0, exec("return (short)1+(short)0;")); assertEquals((short)0+(short)0, exec("return (short)0+(short)0;")); } - + public void testLong() throws Exception { assertEquals(1L+1L, exec("long x = 1; long y = 1; return x+y;")); assertEquals(1L+2L, exec("long x = 1; long y = 2; return x+y;")); @@ -136,7 +131,7 @@ public class AdditionTests extends ScriptTestCase { assertEquals(1L+0L, exec("long x = 1; long y = 0; return x+y;")); assertEquals(0L+0L, exec("long x = 0; long y = 0; return x+y;")); } - + public void testLongConst() throws Exception { assertEquals(1L+1L, exec("return 1L+1L;")); assertEquals(1L+2L, exec("return 1L+2L;")); @@ -184,7 +179,7 @@ public class AdditionTests extends 
ScriptTestCase { assertEquals(1.0+0.0, exec("double x = 1.0; double y = 0.0; return x+y;")); assertEquals(0.0+0.0, exec("double x = 0.0; double y = 0.0; return x+y;")); } - + public void testDoubleConst() throws Exception { assertEquals(1.0+1.0, exec("return 1.0+1.0;")); assertEquals(1.0+2.0, exec("return 1.0+2.0;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java index 94beac0c58c..4603a669df2 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.plan.a; -import org.elasticsearch.common.settings.Settings; +import java.util.Collections; +import java.util.Map; /** Tests floating point overflow with numeric overflow disabled */ public class FloatOverflowDisabledTests extends ScriptTestCase { + /** wire overflow to false for all tests */ @Override - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(super.getSettings()); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, false); - return builder.build(); + public Object exec(String script, Map vars) { + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "false")); } public void testAssignmentAdditionOverflow() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java index ff1c315628f..02a738de71e 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.plan.a; -import org.elasticsearch.common.settings.Settings; +import java.util.Collections; +import java.util.Map; /** Tests floating point overflow with numeric overflow enabled */ public class FloatOverflowEnabledTests extends ScriptTestCase { + /** wire overflow to true for all tests */ @Override - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(super.getSettings()); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, true); - return builder.build(); + public Object exec(String script, Map vars) { + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true")); } public void testAssignmentAdditionOverflow() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java index 279ea0616d9..dbffb11f0d0 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.plan.a; -import org.elasticsearch.common.settings.Settings; +import java.util.Collections; +import java.util.Map; /** Tests integer overflow with numeric overflow disabled */ public class IntegerOverflowDisabledTests extends ScriptTestCase { + /** wire overflow to false for all tests */ @Override - protected Settings getSettings() { - Settings.Builder builder =
Settings.builder(); - builder.put(super.getSettings()); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, false); - return builder.build(); + public Object exec(String script, Map vars) { + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "false")); } public void testAssignmentAdditionOverflow() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java index 8abd2695915..cdab0e89fe6 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java @@ -19,17 +19,16 @@ package org.elasticsearch.plan.a; -import org.elasticsearch.common.settings.Settings; +import java.util.Collections; +import java.util.Map; /** Tests integer overflow with numeric overflow enabled */ public class IntegerOverflowEnabledTests extends ScriptTestCase { + /** wire overflow to true for all tests */ @Override - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(super.getSettings()); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, true); - return builder.build(); + public Object exec(String script, Map vars) { + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true")); } public void testAssignmentAdditionOverflow() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java index d2bbe02a625..e5084392f99 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -78,7 +79,7 @@ public class ScriptEngineTests extends ScriptTestCase { Map ctx = new HashMap<>(); vars.put("ctx", ctx); - Object compiledScript = scriptEngine.compile("return ((Map)input.get(\"ctx\")).get(\"value\");"); + Object compiledScript = scriptEngine.compile("return ((Map)input.get(\"ctx\")).get(\"value\");", Collections.emptyMap()); ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution1", "plan-a", compiledScript), vars); @@ -93,7 +94,7 @@ public class ScriptEngineTests extends ScriptTestCase { public void testChangingVarsCrossExecution2() { Map vars = new HashMap<>(); - Object compiledScript = scriptEngine.compile("return input.get(\"value\");"); + Object compiledScript = scriptEngine.compile("return input.get(\"value\");", Collections.emptyMap()); ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution2", "plan-a", compiledScript), vars); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java index 253e37183f3..5b4948036f3 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java +++ 
b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.Collections; import java.util.Map; /** @@ -34,17 +35,10 @@ import java.util.Map; */ public abstract class ScriptTestCase extends ESTestCase { protected PlanAScriptEngineService scriptEngine; - - /** Override to provide different compiler settings */ - protected Settings getSettings() { - Settings.Builder builder = Settings.builder(); - builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, random().nextBoolean()); - return builder.build(); - } @Before public void setup() { - scriptEngine = new PlanAScriptEngineService(getSettings()); + scriptEngine = new PlanAScriptEngineService(Settings.EMPTY); } /** Compiles and returns the result of {@code script} */ @@ -54,7 +48,12 @@ public abstract class ScriptTestCase extends ESTestCase { /** Compiles and returns the result of {@code script} with access to {@code vars} */ public Object exec(String script, Map vars) { - Object object = scriptEngine.compile(script); + return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, Boolean.toString(random().nextBoolean()))); + } + + /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ + public Object exec(String script, Map vars, Map compileParams) { + Object object = scriptEngine.compile(script, compileParams); CompiledScript compiled = new CompiledScript(ScriptService.ScriptType.INLINE, getTestName(), "plan-a", object); return scriptEngine.executable(compiled, vars).run(); } diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java index de2c1c9ea3e..277778e7e76 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.plan.a; +import java.util.Collections; + public class WhenThingsGoWrongTests extends ScriptTestCase { public void testNullPointer() { try { @@ -38,4 +40,13 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { fail("should have hit cce"); } catch (ClassCastException expected) {} } + + public void testBogusParameter() { + try { + exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue")); + fail("should have hit IAE"); + } catch (IllegalArgumentException expected) { + assertTrue(expected.getMessage().contains("Unrecognized compile-time parameter")); + } + } } diff --git a/plugins/lang-python/build.gradle b/plugins/lang-python/build.gradle index 269a3249386..103a15784ea 100644 --- a/plugins/lang-python/build.gradle +++ b/plugins/lang-python/build.gradle @@ -36,3 +36,493 @@ integTest { } } +thirdPartyAudit.excludes = [ + // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + 'org.python.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + + // uses internal java api: sun.misc.Cleaner + 'org.python.netty.util.internal.Cleaner0', + + // uses internal java api: sun.misc.Signal + 'jnr.posix.JavaPOSIX', + 'jnr.posix.JavaPOSIX$SunMiscSignalHandler', + + // uses internal java api: sun.misc.Unsafe + 'com.kenai.jffi.MemoryIO$UnsafeImpl', + 'com.kenai.jffi.MemoryIO$UnsafeImpl32', + 
'com.kenai.jffi.MemoryIO$UnsafeImpl64', + 'org.python.google.common.cache.Striped64', + 'org.python.google.common.cache.Striped64$1', + 'org.python.google.common.cache.Striped64$Cell', + 'org.python.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'org.python.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.python.netty.util.internal.chmv8.ForkJoinPool$2', + 'org.python.netty.util.internal.PlatformDependent0', + 'org.python.netty.util.internal.UnsafeAtomicIntegerFieldUpdater', + 'org.python.netty.util.internal.UnsafeAtomicLongFieldUpdater', + 'org.python.netty.util.internal.UnsafeAtomicReferenceFieldUpdater', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8$1', + 'org.python.netty.util.internal.chmv8.ConcurrentHashMapV8$TreeBin', + 'org.python.netty.util.internal.chmv8.CountedCompleter', + 'org.python.netty.util.internal.chmv8.CountedCompleter$1', + 'org.python.netty.util.internal.chmv8.ForkJoinPool', + 'org.python.netty.util.internal.chmv8.ForkJoinPool$WorkQueue', + 'org.python.netty.util.internal.chmv8.ForkJoinTask', + 'org.python.netty.util.internal.chmv8.ForkJoinTask$1', + + // "uberjaring" (but not shading) classes that have been in the JDK since 1.5 + // nice job python. + 'javax.xml.XMLConstants', + 'javax.xml.datatype.DatatypeConfigurationException', + 'javax.xml.datatype.DatatypeConstants$1', + 'javax.xml.datatype.DatatypeConstants$Field', + 'javax.xml.datatype.DatatypeConstants', + 'javax.xml.datatype.DatatypeFactory', + 'javax.xml.datatype.Duration', + 'javax.xml.datatype.FactoryFinder', + 'javax.xml.datatype.SecuritySupport$1', + 'javax.xml.datatype.SecuritySupport$2', + 'javax.xml.datatype.SecuritySupport$3', + 'javax.xml.datatype.SecuritySupport$4', + 'javax.xml.datatype.SecuritySupport$5', + 'javax.xml.datatype.SecuritySupport', + 'javax.xml.datatype.XMLGregorianCalendar', + 'javax.xml.namespace.NamespaceContext', + 'javax.xml.namespace.QName$1', + 'javax.xml.namespace.QName', + 'javax.xml.parsers.DocumentBuilder', + 'javax.xml.parsers.DocumentBuilderFactory', + 'javax.xml.parsers.FactoryConfigurationError', + 'javax.xml.parsers.FactoryFinder', + 'javax.xml.parsers.ParserConfigurationException', + 'javax.xml.parsers.SAXParser', + 'javax.xml.parsers.SAXParserFactory', + 'javax.xml.parsers.SecuritySupport$1', + 'javax.xml.parsers.SecuritySupport$2', + 'javax.xml.parsers.SecuritySupport$3', + 'javax.xml.parsers.SecuritySupport$4', + 'javax.xml.parsers.SecuritySupport$5', + 'javax.xml.parsers.SecuritySupport', + 'javax.xml.stream.EventFilter', + 'javax.xml.stream.FactoryConfigurationError', + 'javax.xml.stream.FactoryFinder', + 'javax.xml.stream.Location', + 'javax.xml.stream.SecuritySupport$1', + 'javax.xml.stream.SecuritySupport$2', + 'javax.xml.stream.SecuritySupport$3', + 'javax.xml.stream.SecuritySupport$4', + 'javax.xml.stream.SecuritySupport$5', + 'javax.xml.stream.SecuritySupport', + 'javax.xml.stream.StreamFilter', + 'javax.xml.stream.XMLEventFactory', + 'javax.xml.stream.XMLEventReader', + 'javax.xml.stream.XMLEventWriter', + 'javax.xml.stream.XMLInputFactory', + 'javax.xml.stream.XMLOutputFactory', + 'javax.xml.stream.XMLReporter', + 'javax.xml.stream.XMLResolver', + 'javax.xml.stream.XMLStreamConstants', + 'javax.xml.stream.XMLStreamException', + 'javax.xml.stream.XMLStreamReader', + 'javax.xml.stream.XMLStreamWriter', + 'javax.xml.stream.events.Attribute', + 'javax.xml.stream.events.Characters', + 
'javax.xml.stream.events.Comment', + 'javax.xml.stream.events.DTD', + 'javax.xml.stream.events.EndDocument', + 'javax.xml.stream.events.EndElement', + 'javax.xml.stream.events.EntityDeclaration', + 'javax.xml.stream.events.EntityReference', + 'javax.xml.stream.events.Namespace', + 'javax.xml.stream.events.NotationDeclaration', + 'javax.xml.stream.events.ProcessingInstruction', + 'javax.xml.stream.events.StartDocument', + 'javax.xml.stream.events.StartElement', + 'javax.xml.stream.events.XMLEvent', + 'javax.xml.stream.util.EventReaderDelegate', + 'javax.xml.stream.util.StreamReaderDelegate', + 'javax.xml.stream.util.XMLEventAllocator', + 'javax.xml.stream.util.XMLEventConsumer', + 'javax.xml.transform.ErrorListener', + 'javax.xml.transform.FactoryFinder', + 'javax.xml.transform.OutputKeys', + 'javax.xml.transform.Result', + 'javax.xml.transform.SecuritySupport$1', + 'javax.xml.transform.SecuritySupport$2', + 'javax.xml.transform.SecuritySupport$3', + 'javax.xml.transform.SecuritySupport$4', + 'javax.xml.transform.SecuritySupport$5', + 'javax.xml.transform.SecuritySupport', + 'javax.xml.transform.Source', + 'javax.xml.transform.SourceLocator', + 'javax.xml.transform.Templates', + 'javax.xml.transform.Transformer', + 'javax.xml.transform.TransformerConfigurationException', + 'javax.xml.transform.TransformerException', + 'javax.xml.transform.TransformerFactory', + 'javax.xml.transform.TransformerFactoryConfigurationError', + 'javax.xml.transform.URIResolver', + 'javax.xml.transform.dom.DOMLocator', + 'javax.xml.transform.dom.DOMResult', + 'javax.xml.transform.dom.DOMSource', + 'javax.xml.transform.sax.SAXResult', + 'javax.xml.transform.sax.SAXSource', + 'javax.xml.transform.sax.SAXTransformerFactory', + 'javax.xml.transform.sax.TemplatesHandler', + 'javax.xml.transform.sax.TransformerHandler', + 'javax.xml.transform.stax.StAXResult', + 'javax.xml.transform.stax.StAXSource', + 'javax.xml.transform.stream.StreamResult', + 'javax.xml.transform.stream.StreamSource', + 'javax.xml.validation.Schema', + 'javax.xml.validation.SchemaFactory', + 'javax.xml.validation.SchemaFactoryFinder$1', + 'javax.xml.validation.SchemaFactoryFinder$2', + 'javax.xml.validation.SchemaFactoryFinder', + 'javax.xml.validation.SchemaFactoryLoader', + 'javax.xml.validation.SecuritySupport$1', + 'javax.xml.validation.SecuritySupport$2', + 'javax.xml.validation.SecuritySupport$3', + 'javax.xml.validation.SecuritySupport$4', + 'javax.xml.validation.SecuritySupport$5', + 'javax.xml.validation.SecuritySupport$6', + 'javax.xml.validation.SecuritySupport$7', + 'javax.xml.validation.SecuritySupport$8', + 'javax.xml.validation.SecuritySupport', + 'javax.xml.validation.TypeInfoProvider', + 'javax.xml.validation.Validator', + 'javax.xml.validation.ValidatorHandler', + 'javax.xml.xpath.SecuritySupport$1', + 'javax.xml.xpath.SecuritySupport$2', + 'javax.xml.xpath.SecuritySupport$3', + 'javax.xml.xpath.SecuritySupport$4', + 'javax.xml.xpath.SecuritySupport$5', + 'javax.xml.xpath.SecuritySupport$6', + 'javax.xml.xpath.SecuritySupport$7', + 'javax.xml.xpath.SecuritySupport$8', + 'javax.xml.xpath.SecuritySupport', + 'javax.xml.xpath.XPath', + 'javax.xml.xpath.XPathConstants', + 'javax.xml.xpath.XPathException', + 'javax.xml.xpath.XPathExpression', + 'javax.xml.xpath.XPathExpressionException', + 'javax.xml.xpath.XPathFactory', + 'javax.xml.xpath.XPathFactoryConfigurationException', + 'javax.xml.xpath.XPathFactoryFinder$1', + 'javax.xml.xpath.XPathFactoryFinder$2', + 'javax.xml.xpath.XPathFactoryFinder', + 'javax.xml.xpath.XPathFunction', + 
'javax.xml.xpath.XPathFunctionException', + 'javax.xml.xpath.XPathFunctionResolver', + 'javax.xml.xpath.XPathVariableResolver', + 'org.w3c.dom.Attr', + 'org.w3c.dom.CDATASection', + 'org.w3c.dom.CharacterData', + 'org.w3c.dom.Comment', + 'org.w3c.dom.DOMConfiguration', + 'org.w3c.dom.DOMError', + 'org.w3c.dom.DOMErrorHandler', + 'org.w3c.dom.DOMException', + 'org.w3c.dom.DOMImplementation', + 'org.w3c.dom.DOMImplementationList', + 'org.w3c.dom.DOMImplementationSource', + 'org.w3c.dom.DOMLocator', + 'org.w3c.dom.DOMStringList', + 'org.w3c.dom.Document', + 'org.w3c.dom.DocumentFragment', + 'org.w3c.dom.DocumentType', + 'org.w3c.dom.Element', + 'org.w3c.dom.Entity', + 'org.w3c.dom.EntityReference', + 'org.w3c.dom.NameList', + 'org.w3c.dom.NamedNodeMap', + 'org.w3c.dom.Node', + 'org.w3c.dom.NodeList', + 'org.w3c.dom.Notation', + 'org.w3c.dom.ProcessingInstruction', + 'org.w3c.dom.Text', + 'org.w3c.dom.TypeInfo', + 'org.w3c.dom.UserDataHandler', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry$1', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry$2', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry$3', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry$4', + 'org.w3c.dom.bootstrap.DOMImplementationRegistry', + 'org.w3c.dom.css.CSS2Properties', + 'org.w3c.dom.css.CSSCharsetRule', + 'org.w3c.dom.css.CSSFontFaceRule', + 'org.w3c.dom.css.CSSImportRule', + 'org.w3c.dom.css.CSSMediaRule', + 'org.w3c.dom.css.CSSPageRule', + 'org.w3c.dom.css.CSSPrimitiveValue', + 'org.w3c.dom.css.CSSRule', + 'org.w3c.dom.css.CSSRuleList', + 'org.w3c.dom.css.CSSStyleDeclaration', + 'org.w3c.dom.css.CSSStyleRule', + 'org.w3c.dom.css.CSSStyleSheet', + 'org.w3c.dom.css.CSSUnknownRule', + 'org.w3c.dom.css.CSSValue', + 'org.w3c.dom.css.CSSValueList', + 'org.w3c.dom.css.Counter', + 'org.w3c.dom.css.DOMImplementationCSS', + 'org.w3c.dom.css.DocumentCSS', + 'org.w3c.dom.css.ElementCSSInlineStyle', + 'org.w3c.dom.css.RGBColor', + 'org.w3c.dom.css.Rect', + 'org.w3c.dom.css.ViewCSS', + 'org.w3c.dom.events.DocumentEvent', + 'org.w3c.dom.events.Event', + 'org.w3c.dom.events.EventException', + 'org.w3c.dom.events.EventListener', + 'org.w3c.dom.events.EventTarget', + 'org.w3c.dom.events.MouseEvent', + 'org.w3c.dom.events.MutationEvent', + 'org.w3c.dom.events.UIEvent', + 'org.w3c.dom.html.HTMLAnchorElement', + 'org.w3c.dom.html.HTMLAppletElement', + 'org.w3c.dom.html.HTMLAreaElement', + 'org.w3c.dom.html.HTMLBRElement', + 'org.w3c.dom.html.HTMLBaseElement', + 'org.w3c.dom.html.HTMLBaseFontElement', + 'org.w3c.dom.html.HTMLBodyElement', + 'org.w3c.dom.html.HTMLButtonElement', + 'org.w3c.dom.html.HTMLCollection', + 'org.w3c.dom.html.HTMLDListElement', + 'org.w3c.dom.html.HTMLDOMImplementation', + 'org.w3c.dom.html.HTMLDirectoryElement', + 'org.w3c.dom.html.HTMLDivElement', + 'org.w3c.dom.html.HTMLDocument', + 'org.w3c.dom.html.HTMLElement', + 'org.w3c.dom.html.HTMLFieldSetElement', + 'org.w3c.dom.html.HTMLFontElement', + 'org.w3c.dom.html.HTMLFormElement', + 'org.w3c.dom.html.HTMLFrameElement', + 'org.w3c.dom.html.HTMLFrameSetElement', + 'org.w3c.dom.html.HTMLHRElement', + 'org.w3c.dom.html.HTMLHeadElement', + 'org.w3c.dom.html.HTMLHeadingElement', + 'org.w3c.dom.html.HTMLHtmlElement', + 'org.w3c.dom.html.HTMLIFrameElement', + 'org.w3c.dom.html.HTMLImageElement', + 'org.w3c.dom.html.HTMLInputElement', + 'org.w3c.dom.html.HTMLIsIndexElement', + 'org.w3c.dom.html.HTMLLIElement', + 'org.w3c.dom.html.HTMLLabelElement', + 'org.w3c.dom.html.HTMLLegendElement', + 'org.w3c.dom.html.HTMLLinkElement', + 'org.w3c.dom.html.HTMLMapElement', + 
'org.w3c.dom.html.HTMLMenuElement', + 'org.w3c.dom.html.HTMLMetaElement', + 'org.w3c.dom.html.HTMLModElement', + 'org.w3c.dom.html.HTMLOListElement', + 'org.w3c.dom.html.HTMLObjectElement', + 'org.w3c.dom.html.HTMLOptGroupElement', + 'org.w3c.dom.html.HTMLOptionElement', + 'org.w3c.dom.html.HTMLParagraphElement', + 'org.w3c.dom.html.HTMLParamElement', + 'org.w3c.dom.html.HTMLPreElement', + 'org.w3c.dom.html.HTMLQuoteElement', + 'org.w3c.dom.html.HTMLScriptElement', + 'org.w3c.dom.html.HTMLSelectElement', + 'org.w3c.dom.html.HTMLStyleElement', + 'org.w3c.dom.html.HTMLTableCaptionElement', + 'org.w3c.dom.html.HTMLTableCellElement', + 'org.w3c.dom.html.HTMLTableColElement', + 'org.w3c.dom.html.HTMLTableElement', + 'org.w3c.dom.html.HTMLTableRowElement', + 'org.w3c.dom.html.HTMLTableSectionElement', + 'org.w3c.dom.html.HTMLTextAreaElement', + 'org.w3c.dom.html.HTMLTitleElement', + 'org.w3c.dom.html.HTMLUListElement', + 'org.w3c.dom.ls.DOMImplementationLS', + 'org.w3c.dom.ls.LSException', + 'org.w3c.dom.ls.LSInput', + 'org.w3c.dom.ls.LSLoadEvent', + 'org.w3c.dom.ls.LSOutput', + 'org.w3c.dom.ls.LSParser', + 'org.w3c.dom.ls.LSParserFilter', + 'org.w3c.dom.ls.LSProgressEvent', + 'org.w3c.dom.ls.LSResourceResolver', + 'org.w3c.dom.ls.LSSerializer', + 'org.w3c.dom.ls.LSSerializerFilter', + 'org.w3c.dom.ranges.DocumentRange', + 'org.w3c.dom.ranges.Range', + 'org.w3c.dom.ranges.RangeException', + 'org.w3c.dom.stylesheets.DocumentStyle', + 'org.w3c.dom.stylesheets.LinkStyle', + 'org.w3c.dom.stylesheets.MediaList', + 'org.w3c.dom.stylesheets.StyleSheet', + 'org.w3c.dom.stylesheets.StyleSheetList', + 'org.w3c.dom.traversal.DocumentTraversal', + 'org.w3c.dom.traversal.NodeFilter', + 'org.w3c.dom.traversal.NodeIterator', + 'org.w3c.dom.traversal.TreeWalker', + 'org.w3c.dom.views.AbstractView', + 'org.w3c.dom.views.DocumentView', + 'org.w3c.dom.xpath.XPathEvaluator', + 'org.w3c.dom.xpath.XPathException', + 'org.w3c.dom.xpath.XPathExpression', + 'org.w3c.dom.xpath.XPathNSResolver', + 'org.w3c.dom.xpath.XPathNamespace', + 'org.w3c.dom.xpath.XPathResult', + 'org.xml.sax.AttributeList', + 'org.xml.sax.Attributes', + 'org.xml.sax.ContentHandler', + 'org.xml.sax.DTDHandler', + 'org.xml.sax.DocumentHandler', + 'org.xml.sax.EntityResolver', + 'org.xml.sax.ErrorHandler', + 'org.xml.sax.HandlerBase', + 'org.xml.sax.InputSource', + 'org.xml.sax.Locator', + 'org.xml.sax.Parser', + 'org.xml.sax.SAXException', + 'org.xml.sax.SAXNotRecognizedException', + 'org.xml.sax.SAXNotSupportedException', + 'org.xml.sax.SAXParseException', + 'org.xml.sax.XMLFilter', + 'org.xml.sax.XMLReader', + 'org.xml.sax.ext.Attributes2', + 'org.xml.sax.ext.Attributes2Impl', + 'org.xml.sax.ext.DeclHandler', + 'org.xml.sax.ext.DefaultHandler2', + 'org.xml.sax.ext.EntityResolver2', + 'org.xml.sax.ext.LexicalHandler', + 'org.xml.sax.ext.Locator2', + 'org.xml.sax.ext.Locator2Impl', + 'org.xml.sax.helpers.AttributeListImpl', + 'org.xml.sax.helpers.AttributesImpl', + 'org.xml.sax.helpers.DefaultHandler', + 'org.xml.sax.helpers.LocatorImpl', + 'org.xml.sax.helpers.NamespaceSupport$Context', + 'org.xml.sax.helpers.NamespaceSupport', + 'org.xml.sax.helpers.NewInstance', + 'org.xml.sax.helpers.ParserAdapter$AttributeListAdapter', + 'org.xml.sax.helpers.ParserAdapter', + 'org.xml.sax.helpers.ParserFactory', + 'org.xml.sax.helpers.SecuritySupport$1', + 'org.xml.sax.helpers.SecuritySupport$2', + 'org.xml.sax.helpers.SecuritySupport$3', + 'org.xml.sax.helpers.SecuritySupport$4', + 'org.xml.sax.helpers.SecuritySupport', + 'org.xml.sax.helpers.XMLFilterImpl', + 
'org.xml.sax.helpers.XMLReaderAdapter$AttributesAdapter', + 'org.xml.sax.helpers.XMLReaderAdapter', + 'org.xml.sax.helpers.XMLReaderFactory', + + // classes are missing + 'com.jcraft.jzlib.Deflater', + 'com.jcraft.jzlib.Inflater', + 'com.jcraft.jzlib.JZlib$WrapperType', + 'com.jcraft.jzlib.JZlib', + 'javassist.ClassClassPath', + 'javassist.ClassPath', + 'javassist.ClassPool', + 'javassist.CtClass', + 'javassist.CtMethod', + 'javax.servlet.Filter', + 'javax.servlet.FilterChain', + 'javax.servlet.FilterConfig', + 'javax.servlet.ServletConfig', + 'javax.servlet.ServletContext', + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'javax.servlet.ServletException', + 'javax.servlet.ServletRequest', + 'javax.servlet.ServletResponse', + 'javax.servlet.http.HttpServlet', + 'javax.servlet.http.HttpServletRequest', + 'javax.servlet.http.HttpServletResponse', + 'jnr.x86asm.Asm', + 'jnr.x86asm.Assembler', + 'jnr.x86asm.CPU', + 'jnr.x86asm.Mem', + 'jnr.x86asm.Register', + 'junit.framework.Assert', + 'junit.framework.TestCase', + 'org.antlr.stringtemplate.StringTemplate', + 'org.eclipse.jetty.alpn.ALPN$ClientProvider', + 'org.eclipse.jetty.alpn.ALPN$ServerProvider', + 'org.eclipse.jetty.alpn.ALPN', + 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', + 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', + 'org.jboss.marshalling.ByteInput', + 'org.jboss.marshalling.ByteOutput', + 'org.jboss.marshalling.Marshaller', + 'org.jboss.marshalling.MarshallerFactory', + 'org.jboss.marshalling.MarshallingConfiguration', + 'org.jboss.marshalling.Unmarshaller', + 'org.junit.Assert', + 'org.junit.internal.matchers.CombinableMatcher', + 'org.junit.matchers.JUnitMatchers', + 'org.junit.runner.JUnitCore', + 'org.python.apache.commons.logging.Log', + 'org.python.apache.commons.logging.LogFactory', + 'org.python.apache.log4j.Level', + 'org.python.apache.log4j.Logger', + 'org.python.apache.tomcat.jni.Buffer', + 'org.python.apache.tomcat.jni.CertificateVerifier', + 'org.python.apache.tomcat.jni.Library', + 'org.python.apache.tomcat.jni.Pool', + 'org.python.apache.tomcat.jni.SSL', + 'org.python.apache.tomcat.jni.SSLContext', + 'org.python.apache.tools.ant.BuildException', + 'org.python.apache.tools.ant.DirectoryScanner', + 'org.python.apache.tools.ant.Project', + 'org.python.apache.tools.ant.taskdefs.Execute', + 'org.python.apache.tools.ant.taskdefs.Java', + 'org.python.apache.tools.ant.taskdefs.MatchingTask', + 'org.python.apache.tools.ant.types.Commandline$Argument', + 'org.python.apache.tools.ant.types.Path', + 'org.python.apache.tools.ant.types.Resource', + 'org.python.apache.tools.ant.types.ResourceCollection', + 'org.python.apache.tools.ant.types.resources.BaseResourceCollectionContainer', + 'org.python.apache.tools.ant.util.GlobPatternMapper', + 'org.python.apache.tools.ant.util.SourceFileScanner', + 'org.python.apache.xml.resolver.Catalog', + 'org.python.apache.xml.resolver.CatalogManager', + 'org.python.apache.xml.resolver.readers.SAXCatalogReader', + 'org.python.google.protobuf.CodedInputStream', + 'org.python.google.protobuf.CodedOutputStream', + 'org.python.google.protobuf.ExtensionRegistry', + 'org.python.google.protobuf.ExtensionRegistryLite', + 'org.python.google.protobuf.MessageLite$Builder', + 'org.python.google.protobuf.MessageLite', + 'org.python.google.protobuf.MessageLiteOrBuilder', + 'org.python.google.protobuf.Parser', + 'org.python.objectweb.asm.tree.AbstractInsnNode', + 'org.python.objectweb.asm.tree.ClassNode', + 
'org.python.objectweb.asm.tree.InsnList', + 'org.python.objectweb.asm.tree.InsnNode', + 'org.python.objectweb.asm.tree.JumpInsnNode', + 'org.python.objectweb.asm.tree.LabelNode', + 'org.python.objectweb.asm.tree.LocalVariableNode', + 'org.python.objectweb.asm.tree.LookupSwitchInsnNode', + 'org.python.objectweb.asm.tree.MethodNode', + 'org.python.objectweb.asm.tree.TableSwitchInsnNode', + 'org.python.objectweb.asm.tree.TryCatchBlockNode', + 'org.python.objectweb.asm.tree.analysis.Analyzer', + 'org.python.objectweb.asm.tree.analysis.BasicValue', + 'org.python.objectweb.asm.tree.analysis.BasicVerifier', + 'org.python.objectweb.asm.tree.analysis.Frame', + 'org.python.objectweb.asm.tree.analysis.SimpleVerifier', + 'org.tukaani.xz.ARMOptions', + 'org.tukaani.xz.ARMThumbOptions', + 'org.tukaani.xz.DeltaOptions', + 'org.tukaani.xz.FilterOptions', + 'org.tukaani.xz.FinishableWrapperOutputStream', + 'org.tukaani.xz.IA64Options', + 'org.tukaani.xz.LZMA2InputStream', + 'org.tukaani.xz.LZMA2Options', + 'org.tukaani.xz.LZMAInputStream', + 'org.tukaani.xz.PowerPCOptions', + 'org.tukaani.xz.SPARCOptions', + 'org.tukaani.xz.SingleXZInputStream', + 'org.tukaani.xz.UnsupportedOptionsException', + 'org.tukaani.xz.X86Options', + 'org.tukaani.xz.XZ', + 'org.tukaani.xz.XZInputStream', + 'org.tukaani.xz.XZOutputStream', +] diff --git a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java index 1930f530671..3722709e420 100644 --- a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java +++ b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java @@ -19,18 +19,6 @@ package org.elasticsearch.script.python; -import java.io.IOException; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.Permissions; -import java.security.PrivilegedAction; -import java.security.ProtectionDomain; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.SpecialPermission; @@ -53,6 +41,14 @@ import org.python.core.PyObject; import org.python.core.PyStringMap; import org.python.util.PythonInterpreter; +import java.io.IOException; +import java.security.AccessControlContext; +import java.security.AccessController; +import java.security.Permissions; +import java.security.PrivilegedAction; +import java.security.ProtectionDomain; +import java.util.Map; + /** * */ @@ -60,7 +56,7 @@ import org.python.util.PythonInterpreter; public class PythonScriptEngineService extends AbstractComponent implements ScriptEngineService { private final PythonInterpreter interp; - + @Inject public PythonScriptEngineService(Settings settings) { super(settings); @@ -110,7 +106,7 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri } @Override - public Object compile(String script) { + public Object compile(String script, Map params) { // classloader created here SecurityManager sm = System.getSecurityManager(); if (sm != null) { @@ -293,7 +289,7 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri if (value == null) { return null; } else if (value instanceof PyObject) { - // seems like this is enough, inner PyDictionary will do the conversion 
for us for example, so expose it directly + // seems like this is enough, inner PyDictionary will do the conversion for us for example, so expose it directly return ((PyObject) value).__tojava__(Object.class); } return value; diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptEngineTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptEngineTests.java index e713bd67c92..a0bfab43c54 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptEngineTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptEngineTests.java @@ -29,6 +29,7 @@ import org.junit.After; import org.junit.Before; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -53,7 +54,7 @@ public class PythonScriptEngineTests extends ESTestCase { public void testSimpleEquation() { Map vars = new HashMap(); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testSimpleEquation", "python", se.compile("1 + 2")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testSimpleEquation", "python", se.compile("1 + 2", Collections.emptyMap())), vars).run(); assertThat(((Number) o).intValue(), equalTo(3)); } @@ -63,13 +64,13 @@ public class PythonScriptEngineTests extends ESTestCase { Map obj2 = MapBuilder.newMapBuilder().put("prop2", "value2").map(); Map obj1 = MapBuilder.newMapBuilder().put("prop1", "value1").put("obj2", obj2).put("l", Arrays.asList("2", "1")).map(); vars.put("obj1", obj1); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "python", se.compile("obj1")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "python", se.compile("obj1", Collections.emptyMap())), vars).run(); assertThat(o, instanceOf(Map.class)); obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); - o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "python", se.compile("obj1['l'][0]")), vars).run(); + o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testMapAccess", "python", se.compile("obj1['l'][0]", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("2")); } @@ -82,7 +83,7 @@ public class PythonScriptEngineTests extends ESTestCase { vars.put("ctx", ctx); ExecutableScript executable = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testObjectInterMap", "python", - se.compile("ctx['obj2'] = { 'prop2' : 'value2' }; ctx['obj1']['prop1'] = 'uvalue1'")), vars); + se.compile("ctx['obj2'] = { 'prop2' : 'value2' }; ctx['obj1']['prop1'] = 'uvalue1'", Collections.emptyMap())), vars); executable.run(); ctx = (Map) executable.unwrap(vars.get("ctx")); assertThat(ctx.containsKey("obj1"), equalTo(true)); @@ -100,15 +101,15 @@ public class PythonScriptEngineTests extends ESTestCase { // Object o = se.execute(se.compile("l.length"), vars); // assertThat(((Number) o).intValue(), equalTo(4)); - Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[0]")), vars).run(); + Object o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[0]", Collections.emptyMap())), vars).run(); 
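        // Note on the signature change running through these tests: compile() now
        // takes a map of compile-time parameters. Python defines none, so every call
        // site here passes Collections.emptyMap(); in this change only Plan A reads
        // the map, and it rejects unknown keys with "Unrecognized compile-time
        // parameter" (see testBogusParameter above). A minimal sketch of an engine
        // that does consume the map, mirroring the Plan A test harness earlier in
        // this change (the `planAEngine` variable is hypothetical here):
        //
        //   Map<String, String> params =
        //       Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true");
        //   Object compiled = planAEngine.compile("1 + 1", params);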
assertThat(((String) o), equalTo("1")); - o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[3]")), vars).run(); + o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[3]", Collections.emptyMap())), vars).run(); obj1 = (Map) o; assertThat((String) obj1.get("prop1"), equalTo("value1")); assertThat((String) ((Map) obj1.get("obj2")).get("prop2"), equalTo("value2")); - o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[3]['prop1']")), vars).run(); + o = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testAccessListInScript", "python", se.compile("l[3]['prop1']", Collections.emptyMap())), vars).run(); assertThat(((String) o), equalTo("value1")); } @@ -116,7 +117,7 @@ public class PythonScriptEngineTests extends ESTestCase { Map vars = new HashMap(); Map ctx = new HashMap(); vars.put("ctx", ctx); - Object compiledScript = se.compile("ctx['value']"); + Object compiledScript = se.compile("ctx['value']", Collections.emptyMap()); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution1", "python", compiledScript), vars); ctx.put("value", 1); @@ -131,7 +132,7 @@ public class PythonScriptEngineTests extends ESTestCase { public void testChangingVarsCrossExecution2() { Map vars = new HashMap(); Map ctx = new HashMap(); - Object compiledScript = se.compile("value"); + Object compiledScript = se.compile("value", Collections.emptyMap()); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testChangingVarsCrossExecution2", "python", compiledScript), vars); script.setNextVar("value", 1); diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java index 7b9663f6b6a..06d3da03ab8 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -41,7 +42,7 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { public void testExecutableNoRuntimeParams() throws Exception { final PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "python", compiled); final AtomicBoolean failed = new AtomicBoolean(); @@ -127,7 +128,7 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { public void testExecute() throws Exception { final PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - final Object compiled = se.compile("x + y"); + final Object compiled = se.compile("x + y", Collections.emptyMap()); final CompiledScript compiledScript = new 
CompiledScript(ScriptService.ScriptType.INLINE, "testExecute", "python", compiled); final AtomicBoolean failed = new AtomicBoolean(); diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java index e90ac503f13..22471129e82 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase; import org.python.core.PyException; import java.text.DecimalFormatSymbols; +import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -34,7 +35,7 @@ import java.util.Map; * Tests for Python security permissions */ public class PythonSecurityTests extends ESTestCase { - + private PythonScriptEngineService se; @Override @@ -54,14 +55,14 @@ public class PythonSecurityTests extends ESTestCase { /** runs a script */ private void doTest(String script) { Map vars = new HashMap(); - se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "python", se.compile(script)), vars).run(); + se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "python", se.compile(script, Collections.emptyMap())), vars).run(); } - + /** asserts that a script runs without exception */ private void assertSuccess(String script) { doTest(script); } - + /** assert that a security exception is hit */ private void assertFailure(String script) { try { @@ -76,13 +77,13 @@ public class PythonSecurityTests extends ESTestCase { } } } - + /** Test some py scripts that are ok */ public void testOK() { assertSuccess("1 + 2"); assertSuccess("from java.lang import Math\nMath.cos(0)"); } - + /** Test some py scripts that should hit security exception */ public void testNotOK() { // sanity check :) @@ -93,7 +94,7 @@ public class PythonSecurityTests extends ESTestCase { // no files assertFailure("from java.io import File\nFile.createTempFile(\"test\", \"tmp\")"); } - + /** Test again from a new thread, python has complex threadlocal configuration */ public void testNotOKFromSeparateThread() throws Exception { Thread t = new Thread() { diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java index 60e792c34b5..d9559aef16c 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -35,7 +36,7 @@ public class SimpleBench { public static void main(String[] args) { PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - Object compiled = se.compile("x + y"); + Object compiled = se.compile("x + y", Collections.emptyMap()); CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "SimpleBench", "python", compiled); diff --git a/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml b/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml 
index 4f8926e0db6..6e6266ee9c9 100644 --- a/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml +++ b/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml @@ -18,9 +18,8 @@ id: 1 body: script: - script: - inline: "ctx[\"_source\"][\"myfield\"]=\"bar\"" - lang: python + inline: "ctx[\"_source\"][\"myfield\"]=\"bar\"" + lang: python - do: get: index: test @@ -48,9 +47,8 @@ id: 1 body: script: - script: - inline: "a=42; ctx[\"_source\"][\"myfield\"]=\"bar\"" - lang: python + inline: "a=42; ctx[\"_source\"][\"myfield\"]=\"bar\"" + lang: python - do: get: index: test diff --git a/plugins/mapper-attachments/build.gradle b/plugins/mapper-attachments/build.gradle index e14cf543043..bbe89aa1fd4 100644 --- a/plugins/mapper-attachments/build.gradle +++ b/plugins/mapper-attachments/build.gradle @@ -55,7 +55,6 @@ dependencies { compile "org.apache.poi:poi-ooxml-schemas:${versions.poi}" compile "commons-codec:commons-codec:${versions.commonscodec}" compile 'org.apache.xmlbeans:xmlbeans:2.6.0' - compile 'stax:stax-api:1.0.1' // MS Office compile "org.apache.poi:poi-scratchpad:${versions.poi}" // Apple iWork @@ -69,3 +68,1929 @@ forbiddenPatterns { exclude '**/*.pdf' exclude '**/*.epub' } + +thirdPartyAudit.excludes = [ + // classes are missing: some due to our whitelisting of parsers + 'com.coremedia.iso.IsoFile', + 'com.coremedia.iso.boxes.Box', + 'com.coremedia.iso.boxes.Container', + 'com.coremedia.iso.boxes.FileTypeBox', + 'com.coremedia.iso.boxes.MetaBox', + 'com.coremedia.iso.boxes.MovieBox', + 'com.coremedia.iso.boxes.MovieHeaderBox', + 'com.coremedia.iso.boxes.SampleTableBox', + 'com.coremedia.iso.boxes.TrackBox', + 'com.coremedia.iso.boxes.TrackHeaderBox', + 'com.coremedia.iso.boxes.UserDataBox', + 'com.coremedia.iso.boxes.apple.AppleItemListBox', + 'com.coremedia.iso.boxes.sampleentry.AudioSampleEntry', + 'com.drew.imaging.jpeg.JpegMetadataReader', + 'com.drew.imaging.tiff.TiffMetadataReader', + 'com.drew.imaging.webp.WebpMetadataReader', + 'com.drew.lang.ByteArrayReader', + 'com.drew.lang.GeoLocation', + 'com.drew.lang.Rational', + 'com.drew.metadata.Directory', + 'com.drew.metadata.Metadata', + 'com.drew.metadata.Tag', + 'com.drew.metadata.exif.ExifIFD0Directory', + 'com.drew.metadata.exif.ExifReader', + 'com.drew.metadata.exif.ExifSubIFDDirectory', + 'com.drew.metadata.exif.ExifThumbnailDirectory', + 'com.drew.metadata.exif.GpsDirectory', + 'com.drew.metadata.iptc.IptcDirectory', + 'com.drew.metadata.jpeg.JpegCommentDirectory', + 'com.drew.metadata.jpeg.JpegDirectory', + 'com.drew.metadata.xmp.XmpReader', + 'com.github.junrar.Archive', + 'com.github.junrar.rarfile.FileHeader', + 'com.googlecode.mp4parser.DataSource', + 'com.googlecode.mp4parser.boxes.apple.AppleAlbumBox', + 'com.googlecode.mp4parser.boxes.apple.AppleArtist2Box', + 'com.googlecode.mp4parser.boxes.apple.AppleArtistBox', + 'com.googlecode.mp4parser.boxes.apple.AppleCommentBox', + 'com.googlecode.mp4parser.boxes.apple.AppleCompilationBox', + 'com.googlecode.mp4parser.boxes.apple.AppleDiskNumberBox', + 'com.googlecode.mp4parser.boxes.apple.AppleEncoderBox', + 'com.googlecode.mp4parser.boxes.apple.AppleGenreBox', + 'com.googlecode.mp4parser.boxes.apple.AppleNameBox', + 'com.googlecode.mp4parser.boxes.apple.AppleRecordingYear2Box', + 'com.googlecode.mp4parser.boxes.apple.AppleTrackAuthorBox', + 'com.googlecode.mp4parser.boxes.apple.AppleTrackNumberBox', + 'com.googlecode.mp4parser.boxes.apple.Utf8AppleDataBox', + 'com.googlecode.mp4parser.util.CastUtils', + 
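+  // The groups that follow (Jackcess, ICU, JMatIO, PST, javax.mail, ...) are,
+  // presumably, more of the parser whitelisting noted at the top of this list:
+  // support classes for formats this plugin deliberately does not bundle.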
'com.healthmarketscience.jackcess.Column', + 'com.healthmarketscience.jackcess.CryptCodecProvider', + 'com.healthmarketscience.jackcess.DataType', + 'com.healthmarketscience.jackcess.Database', + 'com.healthmarketscience.jackcess.DatabaseBuilder', + 'com.healthmarketscience.jackcess.PropertyMap$Property', + 'com.healthmarketscience.jackcess.PropertyMap', + 'com.healthmarketscience.jackcess.Row', + 'com.healthmarketscience.jackcess.Table', + 'com.healthmarketscience.jackcess.query.Query', + 'com.healthmarketscience.jackcess.util.LinkResolver', + 'com.healthmarketscience.jackcess.util.OleBlob$CompoundContent', + 'com.healthmarketscience.jackcess.util.OleBlob$Content', + 'com.healthmarketscience.jackcess.util.OleBlob$ContentType', + 'com.healthmarketscience.jackcess.util.OleBlob$LinkContent', + 'com.healthmarketscience.jackcess.util.OleBlob$OtherContent', + 'com.healthmarketscience.jackcess.util.OleBlob$SimplePackageContent', + 'com.healthmarketscience.jackcess.util.OleBlob', + 'com.healthmarketscience.jackcess.util.TableIterableBuilder', + 'com.ibm.icu.text.Bidi', + 'com.ibm.icu.text.Normalizer', + 'com.jmatio.io.MatFileHeader', + 'com.jmatio.io.MatFileReader', + 'com.jmatio.types.MLArray', + 'com.jmatio.types.MLStructure', + 'com.microsoft.schemas.office.x2006.digsig.STPositiveInteger', + 'com.microsoft.schemas.office.x2006.digsig.STSignatureComments', + 'com.microsoft.schemas.office.x2006.digsig.STSignatureProviderUrl', + 'com.microsoft.schemas.office.x2006.digsig.STSignatureText', + 'com.microsoft.schemas.office.x2006.digsig.STSignatureType', + 'com.microsoft.schemas.office.x2006.digsig.STUniqueIdentifierWithBraces', + 'com.microsoft.schemas.office.x2006.digsig.STVersion', + 'com.pff.PSTAttachment', + 'com.pff.PSTFile', + 'com.pff.PSTFolder', + 'com.pff.PSTMessage', + 'com.sun.syndication.feed.synd.SyndContent', + 'com.sun.syndication.feed.synd.SyndEntry', + 'com.sun.syndication.feed.synd.SyndFeed', + 'com.sun.syndication.io.SyndFeedInput', + 'com.uwyn.jhighlight.renderer.Renderer', + 'com.uwyn.jhighlight.renderer.XhtmlRendererFactory', + 'de.l3s.boilerpipe.BoilerpipeExtractor', + 'de.l3s.boilerpipe.document.TextBlock', + 'de.l3s.boilerpipe.document.TextDocument', + 'de.l3s.boilerpipe.extractors.DefaultExtractor', + 'de.l3s.boilerpipe.sax.BoilerpipeHTMLContentHandler', + 'javax.mail.BodyPart', + 'javax.mail.Header', + 'javax.mail.Message$RecipientType', + 'javax.mail.MessagingException', + 'javax.mail.Multipart', + 'javax.mail.Part', + 'javax.mail.Session', + 'javax.mail.Transport', + 'javax.mail.internet.ContentType', + 'javax.mail.internet.InternetAddress', + 'javax.mail.internet.InternetHeaders', + 'javax.mail.internet.MimeBodyPart', + 'javax.mail.internet.MimeMessage', + 'javax.mail.internet.MimeMultipart', + 'javax.mail.internet.MimePart', + 'javax.mail.internet.SharedInputStream', + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'javax.ws.rs.core.Response', + 'junit.framework.TestCase', + 'opennlp.tools.namefind.NameFinderME', + 'opennlp.tools.namefind.TokenNameFinderModel', + 'opennlp.tools.util.Span', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.commons.csv.CSVFormat', + 'org.apache.commons.csv.CSVParser', + 'org.apache.commons.csv.CSVRecord', + 'org.apache.commons.exec.CommandLine', + 'org.apache.commons.exec.DefaultExecutor', + 'org.apache.commons.exec.ExecuteWatchdog', + 'org.apache.commons.exec.PumpStreamHandler', + 'org.apache.commons.exec.environment.EnvironmentUtils', + 'org.apache.ctakes.typesystem.type.refsem.UmlsConcept', 
+ 'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation', + 'org.apache.cxf.jaxrs.client.WebClient', + 'org.apache.cxf.jaxrs.ext.multipart.Attachment', + 'org.apache.cxf.jaxrs.ext.multipart.ContentDisposition', + 'org.apache.cxf.jaxrs.ext.multipart.MultipartBody', + 'org.apache.james.mime4j.MimeException', + 'org.apache.james.mime4j.codec.DecodeMonitor', + 'org.apache.james.mime4j.codec.DecoderUtil', + 'org.apache.james.mime4j.dom.FieldParser', + 'org.apache.james.mime4j.dom.address.Address', + 'org.apache.james.mime4j.dom.address.AddressList', + 'org.apache.james.mime4j.dom.address.Mailbox', + 'org.apache.james.mime4j.dom.address.MailboxList', + 'org.apache.james.mime4j.dom.field.AddressListField', + 'org.apache.james.mime4j.dom.field.DateTimeField', + 'org.apache.james.mime4j.dom.field.MailboxListField', + 'org.apache.james.mime4j.dom.field.ParsedField', + 'org.apache.james.mime4j.dom.field.UnstructuredField', + 'org.apache.james.mime4j.field.LenientFieldParser', + 'org.apache.james.mime4j.parser.ContentHandler', + 'org.apache.james.mime4j.parser.MimeStreamParser', + 'org.apache.james.mime4j.stream.BodyDescriptor', + 'org.apache.james.mime4j.stream.Field', + 'org.apache.james.mime4j.stream.MimeConfig', + 'org.apache.jcp.xml.dsig.internal.dom.DOMDigestMethod', + 'org.apache.jcp.xml.dsig.internal.dom.DOMKeyInfo', + 'org.apache.jcp.xml.dsig.internal.dom.DOMReference', + 'org.apache.jcp.xml.dsig.internal.dom.DOMSignedInfo', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + 'org.apache.sis.internal.util.CheckedArrayList', + 'org.apache.sis.internal.util.CheckedHashSet', + 'org.apache.sis.metadata.iso.DefaultMetadata', + 'org.apache.sis.metadata.iso.DefaultMetadataScope', + 'org.apache.sis.metadata.iso.constraint.DefaultLegalConstraints', + 'org.apache.sis.metadata.iso.extent.DefaultGeographicBoundingBox', + 'org.apache.sis.metadata.iso.extent.DefaultGeographicDescription', + 'org.apache.sis.metadata.iso.identification.DefaultDataIdentification', + 'org.apache.sis.storage.DataStore', + 'org.apache.sis.storage.DataStores', + 'org.apache.sis.util.collection.CodeListSet', + 'org.apache.tools.ant.BuildException', + 'org.apache.tools.ant.FileScanner', + 'org.apache.tools.ant.Project', + 'org.apache.tools.ant.taskdefs.Jar', + 'org.apache.tools.ant.taskdefs.Javac', + 'org.apache.tools.ant.taskdefs.MatchingTask', + 'org.apache.tools.ant.types.FileSet', + 'org.apache.tools.ant.types.Path$PathElement', + 'org.apache.tools.ant.types.Path', + 'org.apache.tools.ant.types.Reference', + 'org.apache.uima.UIMAFramework', + 'org.apache.uima.analysis_engine.AnalysisEngine', + 'org.apache.uima.cas.Type', + 'org.apache.uima.cas.impl.XCASSerializer', + 'org.apache.uima.cas.impl.XmiCasSerializer', + 'org.apache.uima.cas.impl.XmiSerializationSharedData', + 'org.apache.uima.fit.util.JCasUtil', + 'org.apache.uima.jcas.JCas', + 'org.apache.uima.jcas.cas.FSArray', + 'org.apache.uima.util.XMLInputSource', + 'org.apache.uima.util.XMLParser', + 'org.apache.uima.util.XmlCasSerializer', + 'org.apache.xml.security.Init', + 'org.apache.xml.security.c14n.Canonicalizer', + 'org.apache.xml.security.utils.Base64', + 'org.bouncycastle.asn1.DERObject', + 'org.etsi.uri.x01903.v13.AnyType', + 'org.etsi.uri.x01903.v13.ClaimedRolesListType', + 'org.etsi.uri.x01903.v13.CounterSignatureType', + 'org.etsi.uri.x01903.v13.DataObjectFormatType$Factory', + 'org.etsi.uri.x01903.v13.DataObjectFormatType', + 'org.etsi.uri.x01903.v13.IdentifierType', + 'org.etsi.uri.x01903.v13.IncludeType', + 
'org.etsi.uri.x01903.v13.ObjectIdentifierType', + 'org.etsi.uri.x01903.v13.OtherCertStatusRefsType', + 'org.etsi.uri.x01903.v13.OtherCertStatusValuesType', + 'org.etsi.uri.x01903.v13.ReferenceInfoType', + 'org.etsi.uri.x01903.v13.SigPolicyQualifiersListType', + 'org.etsi.uri.x01903.v13.SignaturePolicyIdType', + 'org.etsi.uri.x01903.v13.SignatureProductionPlaceType', + 'org.etsi.uri.x01903.v13.SignedDataObjectPropertiesType', + 'org.etsi.uri.x01903.v13.SignerRoleType', + 'org.etsi.uri.x01903.v13.UnsignedDataObjectPropertiesType', + 'org.etsi.uri.x01903.v13.impl.CRLRefsTypeImpl$1CRLRefList', + 'org.etsi.uri.x01903.v13.impl.CRLValuesTypeImpl$1EncapsulatedCRLValueList', + 'org.etsi.uri.x01903.v13.impl.CertIDListTypeImpl$1CertList', + 'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1EncapsulatedX509CertificateList', + 'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1OtherCertificateList', + 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1EncapsulatedTimeStampList', + 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1IncludeList', + 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1ReferenceInfoList', + 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1XMLTimeStampList', + 'org.etsi.uri.x01903.v13.impl.OCSPRefsTypeImpl$1OCSPRefList', + 'org.etsi.uri.x01903.v13.impl.OCSPValuesTypeImpl$1EncapsulatedOCSPValueList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1ArchiveTimeStampList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttrAuthoritiesCertValuesList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeCertificateRefsList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationRefsList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationValuesList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CertificateValuesList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteCertificateRefsList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteRevocationRefsList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CounterSignatureList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RefsOnlyTimeStampList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RevocationValuesList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SigAndRefsTimeStampList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SignatureTimeStampList', + 'org.etsi.uri.x01903.v14.ValidationDataType$Factory', + 'org.etsi.uri.x01903.v14.ValidationDataType', + 'org.json.JSONArray', + 'org.json.JSONObject', + 'org.json.XML', + 'org.json.simple.JSONArray', + 'org.json.simple.JSONObject', + 'org.json.simple.JSONValue', + 'org.junit.Test', + 'org.junit.internal.TextListener', + 'org.junit.runner.JUnitCore', + 'org.junit.runner.Result', + 'org.objectweb.asm.AnnotationVisitor', + 'org.objectweb.asm.Attribute', + 'org.objectweb.asm.ClassReader', + 'org.objectweb.asm.ClassVisitor', + 'org.objectweb.asm.FieldVisitor', + 'org.objectweb.asm.MethodVisitor', + 'org.objectweb.asm.Type', + 'org.opengis.metadata.Identifier', + 'org.opengis.metadata.citation.Address', + 'org.opengis.metadata.citation.Citation', + 'org.opengis.metadata.citation.CitationDate', + 'org.opengis.metadata.citation.Contact', + 'org.opengis.metadata.citation.DateType', + 
'org.opengis.metadata.citation.OnLineFunction', + 'org.opengis.metadata.citation.OnlineResource', + 'org.opengis.metadata.citation.ResponsibleParty', + 'org.opengis.metadata.citation.Role', + 'org.opengis.metadata.constraint.Restriction', + 'org.opengis.metadata.distribution.DigitalTransferOptions', + 'org.opengis.metadata.distribution.Distribution', + 'org.opengis.metadata.distribution.Distributor', + 'org.opengis.metadata.distribution.Format', + 'org.opengis.metadata.extent.Extent', + 'org.opengis.metadata.identification.Identification', + 'org.opengis.metadata.identification.KeywordType', + 'org.opengis.metadata.identification.Keywords', + 'org.opengis.metadata.identification.Progress', + 'org.opengis.metadata.identification.TopicCategory', + 'org.opengis.metadata.maintenance.ScopeCode', + 'org.opengis.util.InternationalString', + + // Missing openxml schema classes are explained by the fact we use the smaller jar: + // "The full jar of all of the schemas is ooxml-schemas-xx.jar, and it is currently around 15mb. + // The smaller poi-ooxml-schemas jar is only about 4mb. + // This latter jar file only contains the typically used parts though." + // http://poi.apache.org/faq.html#faq-N10025 + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTArea3DChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAreaChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAxisUnit', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBar3DChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBarChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBubbleChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTChartLines', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLbls', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDPt', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDTable', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDateAx', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispBlanksAs', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispUnits', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDoughnutChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTErrBars', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExternalData', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTFirstSliceAng', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTGrouping', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblAlgn', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblOffset', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLegendEntry', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLine3DChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMarkerSize', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMultiLvlStrRef', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTOfPieChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPie3DChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotFmts', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotSource', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTProtection', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRadarChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRelId', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSerAx', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSkip', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTStockChart', + 
'org.openxmlformats.schemas.drawingml.x2006.chart.CTStyle', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface3DChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurfaceChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTextLanguageID', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTrendline', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTUpDownBars', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTView3D', + 'org.openxmlformats.schemas.drawingml.x2006.chart.STPageSetupOrientation', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLegendImpl$1LegendEntryList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1AxIdList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1SerList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1DPtList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1TrendlineList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTNumDataImpl$1PtList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieChartImpl$1SerList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieSerImpl$1DPtList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Area3DChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1AreaChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Bar3DChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BarChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BubbleChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1CatAxList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DateAxList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DoughnutChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Line3DChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1LineChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1OfPieChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Pie3DChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1PieChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1RadarChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ScatterChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SerAxList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1StockChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Surface3DChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SurfaceChartList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ValAxList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1AxIdList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1SerList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1DPtList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1ErrBarsList', + 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1TrendlineList', + 
'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTStrDataImpl$1PtList', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaBiLevelEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaCeilingEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaFloorEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaInverseEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaModulateEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaReplaceEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAngle', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioCD', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioFile', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTBiLevelEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTBlurEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTCell3D', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorChangeEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorReplaceEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorSchemeList', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTComplementTransform', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectionSite', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectorLocking', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTCustomColorList', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTDashStopList', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTDuotoneEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTEffectContainer', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTEmbeddedWAVAudioFile', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTFillOverlayEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTFlatText', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTGammaTransform', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTGlowEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleTransform', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupFillProperties', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupLocking', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTHSLEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTInnerShadowEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseGammaTransform', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseTransform', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTLineJoinBevel', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTLuminanceEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTObjectStyleDefaults', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTPath2DArcTo', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTPatternFillProperties', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTPolarAdjustHandle', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTPositiveFixedAngle', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetShadowEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetTextShape', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTQuickTimeFile', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTReflectionEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTScene3D', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTShape3D', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTShapeLocking', + 
'org.openxmlformats.schemas.drawingml.x2006.main.CTSoftEdgesEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTSupplementalFont', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTableBackgroundStyle', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTablePartStyle', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBlipBullet', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletColorFollowText', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletSizeFollowText', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletTypefaceFollowText', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillFollowText', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillGroupWrapper', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineLineFollowText', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTileInfoProperties', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTTintEffect', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTVideoFile', + 'org.openxmlformats.schemas.drawingml.x2006.main.CTXYAdjustHandle', + 'org.openxmlformats.schemas.drawingml.x2006.main.STBlackWhiteMode', + 'org.openxmlformats.schemas.drawingml.x2006.main.STBlipCompression', + 'org.openxmlformats.schemas.drawingml.x2006.main.STFixedAngle', + 'org.openxmlformats.schemas.drawingml.x2006.main.STGuid', + 'org.openxmlformats.schemas.drawingml.x2006.main.STPanose', + 'org.openxmlformats.schemas.drawingml.x2006.main.STPathFillMode', + 'org.openxmlformats.schemas.drawingml.x2006.main.STRectAlignment', + 'org.openxmlformats.schemas.drawingml.x2006.main.STTextColumnCount', + 'org.openxmlformats.schemas.drawingml.x2006.main.STTextNonNegativePoint', + 'org.openxmlformats.schemas.drawingml.x2006.main.STTextTabAlignType', + 'org.openxmlformats.schemas.drawingml.x2006.main.STTileFlipMode', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhPolarList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhXYList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1BlipFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GradFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GrpFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1NoFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1PattFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1SolidFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaBiLevelList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaCeilingList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaFloorList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaInvList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModFixList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaReplList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BiLevelList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BlurList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrChangeList', + 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrReplList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1DuotoneList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1FillOverlayList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1GraysclList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1HslList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1LumList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1TintList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTConnectionSiteListImpl$1CxnList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectStyleListImpl$1EffectStyleList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1BlipFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GradFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GrpFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1NoFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1PattFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1SolidFillList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFontCollectionImpl$1FontList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGeomGuideListImpl$1GdList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGradientStopListImpl$1GsList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1CompList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GrayList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvGammaList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumOffList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedModList', + 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedOffList', + 
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1ShadeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1TintList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTLineStyleListImpl$1LnList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTOfficeArtExtensionListImpl$1ExtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DCubicBezierToImpl$1PtList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1ArcToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CloseList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CubicBezToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1LnToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1MoveToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1QuadBezToList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DListImpl$1PathList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1CompList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GrayList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvGammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1ShadeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1TintList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1CompList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GrayList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvGammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1ShadeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1TintList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1CompList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GrayList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvGammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1ShadeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1TintList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1CompList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GrayList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvGammaList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatModList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatOffList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1ShadeList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1TintList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableGridImpl$1GridColList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableImpl$1TrList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableRowImpl$1TcList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableStyleListImpl$1TblStyleList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextBodyImpl$1PList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1BrList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1FldList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1RList',
+ 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextTabStopListImpl$1TabList',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.CTAbsoluteAnchor',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1AbsoluteAnchorList',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1OneCellAnchorList',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1TwoCellAnchorList',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1CxnSpList',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GraphicFrameList',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GrpSpList',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1PicList',
+ 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1SpList',
+ 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTEffectExtent',
+ 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosH',
+ 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosV',
+ 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapNone',
+ 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapSquare',
+ 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapThrough',
+ 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTight',
+ 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTopBottom',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTArray',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTCf',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTEmpty',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTNull',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTVstream',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STCy',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STError',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STVectorBaseType',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BoolList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BstrList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CfList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ClsidList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CyList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1DateList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ErrorList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1FiletimeList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I1List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I2List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I4List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I8List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpstrList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpwstrList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R4List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R8List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui1List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui2List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui4List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui8List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1VariantList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BoolList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BstrList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ClsidList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2CyList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2DateList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ErrorList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2FiletimeList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I1List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I2List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I4List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I8List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpstrList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpwstrList',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R4List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R8List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui1List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui2List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui4List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui8List',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTAcc',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBar',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBorderBox',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBox',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTD',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTEqArr',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTF',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTFunc',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTGroupChr',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimLow',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimUpp',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTM',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTMathPr',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTNary',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMath',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMathPara',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTPhant',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTR',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTRad',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSPre',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSub',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSubSup',
+ 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSup',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTControlList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomShowList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomerData',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTEmbeddedFontList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionListModify',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTHandoutMasterIdList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTHeaderFooter',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTKinsoku',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTModifyVerifier',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTPhotoAlbum',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideLayoutIdList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTiming',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTransition',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.CTSmartTags',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.STBookmarkIdSeed',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.STDirection',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.STIndex',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.STPlaceholderSize',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.STSlideSizeType',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentAuthorListImpl$1CmAuthorList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentListImpl$1CmList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCustomerDataListImpl$1CustDataList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTGroupShapeImpl$1GraphicFrameList',
+ 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTGroupShapeImpl$1PicList',
+ 'org.openxmlformats.schemas.schemaLibrary.x2006.main.CTSchemaLibrary',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTAutoSortScope',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTBoolean',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCacheHierarchies',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedItems',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedMembers',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellStyles',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellWatches',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartFormats',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetPr',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetProtection',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetViews',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColHierarchiesUsage',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColItems',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColors',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConditionalFormats',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConsolidation',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTControls',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCsPageSetup',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomChartsheetViews',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomProperties',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomSheetViews',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomWorkbookViews',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataBinding',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataConsolidate',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDateTime',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDdeLink',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDimensions',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTError',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExtensionList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExternalSheetDataSet',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFieldGroup',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileRecoveryPr',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileSharing',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileVersion',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFilterColumn',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFormats',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFunctionGroups',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTGradientFill',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTIgnoredErrors',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureDimensionMaps',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureGroups',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMissing',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTNumber',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleLink',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleObjects',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleSize',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPCDKPIs',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPhoneticRun',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotFilters',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotHierarchies',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotSelection',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTProtectedRanges',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRecord',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowHierarchiesUsage',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowItems',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTScenarios',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSheetBackgroundPicture',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagPr',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagTypes',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTags',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSortState',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTString',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableFormula',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyleInfo',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyles',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTupleCache',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishItems',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishObjects',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishing',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTX',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STCellSpans',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STDataValidationImeMode',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STFieldSortType',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STGuid',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STObjects',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticAlignment',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticType',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPrintError',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STRefMode',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STSheetViewType',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STShowDataAs',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTableType',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTimePeriod',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTotalsRowFunction',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STUpdateLinks',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STVisibility',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$1AuthorList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$2AuthorList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAutoFilterImpl$1FilterColumnList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBookViewsImpl$1WorkbookViewList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBordersImpl$1BorderList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldImpl$1MpMapList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldsImpl$1CacheFieldList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCalcChainImpl$1CList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellStyleXfsImpl$1XfList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellXfsImpl$1XfList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$1FormulaList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$2FormulaList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColFieldsImpl$1FieldList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1CfvoList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1ColorList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColsImpl$1ColList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCommentListImpl$1CommentList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTConditionalFormattingImpl$1CfRuleList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataBarImpl$1CfvoList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataValidationsImpl$1DataValidationList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDxfsImpl$1DxfList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalDefinedNamesImpl$1DefinedNameList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalReferencesImpl$1ExternalReferenceList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalSheetNamesImpl$1SheetNameList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFillsImpl$1FillList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1BList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CharsetList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ColorList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CondenseList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ExtendList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1FamilyList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1IList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1NameList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1OutlineList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SchemeList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ShadowList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1StrikeList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SzList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1UList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1VertAlignList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontsImpl$1FontList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTHyperlinksImpl$1HyperlinkList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTIconSetImpl$1CfvoList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTItemsImpl$1ItemList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1MapList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1SchemaList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMergeCellsImpl$1MergeCellList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTNumFmtsImpl$1NumFmtList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageBreakImpl$1BrkList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageFieldsImpl$1PageFieldList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCacheRecordsImpl$1RList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCachesImpl$1PivotCacheList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotFieldsImpl$1PivotFieldList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1BList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CharsetList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ColorList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CondenseList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ExtendList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1FamilyList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1IList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1OutlineList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1RFontList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SchemeList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ShadowList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1StrikeList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SzList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1UList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1VertAlignList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowFieldsImpl$1FieldList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowImpl$1CList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RPhList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1BList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1DList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1EList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1MList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1NList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1SList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetDataImpl$1RowList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1PivotSelectionList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1SelectionList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewsImpl$1SheetViewList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSingleXmlCellsImpl$1SingleXmlCellList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSstImpl$1SiList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTableColumnsImpl$1TableColumnList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTablePartsImpl$1TablePartList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorkbookImpl$1FileRecoveryPrList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ColsList',
+ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ConditionalFormattingList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAltChunk',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAttr',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTBackground',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCaptions',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCellMergeTrackChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCharacterSpacing',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCnf',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColorSchemeMapping',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColumns',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCompat',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTControl',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlBlock',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlCell',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRow',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRun',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDataBinding',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocGrid',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocRsids',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocType',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocVars',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEastAsianLayout',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnDocProps',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnProps',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEm',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFDDList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFHelpText',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFName',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFStatusText',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFTextInput',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFitText',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFramePr',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnDocProps',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnProps',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTHighlight',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTKinsoku',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLevelSuffix',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLineNumber',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLock',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLongHexNumber',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLvlLegacy',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMacroName',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMailMerge',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMultiLevelType',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTNumPicBullet',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPPrChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageBorders',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageMar',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageNumber',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageSz',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPaperSource',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTParaRPrChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPerm',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPermStart',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPlaceholder',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTProof',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRPrChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTReadingModeInkLockDown',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRuby',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSaveThroughXslt',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtComboBox',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDate',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDropDownList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtRow',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtText',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectPrChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectType',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShapeDefaults',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShortHexNumber',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSignedTwipsMeasure',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSmartTagType',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblGridChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblLayoutType',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblOverlap',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPPr',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrExChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblStylePr',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcMar',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcPrChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextDirection',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextEffect',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextScale',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextboxTightWrap',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrPrChange',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangeNumbering',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangesView',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTwipsMeasure',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTView',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWriteProtection',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWritingStyle',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDateTime',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDisplacedByCustomXml',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHeightRule',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHint',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabAlignment',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabLeader',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabRelativeTo',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STProofErr',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STShortHexNumber',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STThemeColor',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STUcharHexNumber',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STZoom',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTAbstractNumImpl$1LvlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentsImpl$1CommentList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1AnchorList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1InlineList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTEndnotesImpl$1EndnoteList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1CalcOnExitList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1DdListList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EnabledList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EntryMacroList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1ExitMacroList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1HelpTextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1NameList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1StatusTextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1TextInputList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFootnotesImpl$1FootnoteList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1FldSimpleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1HyperlinkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1RList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SmartTagList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SubDocList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTLatentStylesImpl$1LsdExceptionList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumImpl$1LvlOverrideList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1AbstractNumList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumPicBulletList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkStartList',
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SubDocList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1AnnotationRefList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1BrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CommentReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ContinuationSeparatorList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayLongList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayShortList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelInstrTextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelTextList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DrawingList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteRefList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FldCharList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteRefList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1InstrTextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1LastRenderedPageBreakList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthLongList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthShortList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1NoBreakHyphenList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ObjectList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PgNumList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PictList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PtabList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1RubyList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SeparatorList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SoftHyphenList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SymList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TabList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearLongList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearShortList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1InsList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1TcList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1AccList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BarList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BorderBoxList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BoxList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1EqArrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FuncList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1GroupChrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimLowList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimUppList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1NaryList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PhantList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1R2List', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RadList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SPreList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubSupList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSupList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeStartList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1TblList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeEndList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1TcList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SubDocList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtEndPrImpl$1RPrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1AliasList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1BibliographyList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1CitationList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ComboBoxList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DataBindingList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DateList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartListList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartObjList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DropDownListList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1EquationList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1GroupList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1IdList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1LockList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PictureList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PlaceholderList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RPrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RichTextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ShowingPlcHdrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TemporaryList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TextList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1FooterReferenceList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1HeaderReferenceList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1ActiveWritingStyleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1AttachedSchemaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1SmartTagTypeList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SdtList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SubDocList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagPrImpl$1AttrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1FldSimpleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1HyperlinkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1RList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SmartTagList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SubDocList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStyleImpl$1TblStylePrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStylesImpl$1StyleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTabsImpl$1TabList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblGridBaseImpl$1GridColList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1TrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1AltChunkList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeStartList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1TblList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CantSplitList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CnfStyleList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1DivIdList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridAfterList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridBeforeList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1HiddenList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1JcList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblCellSpacingList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblHeaderList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TrHeightList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WAfterList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WBeforeList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1AltChunkList', + 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1DelList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1InsList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathParaList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermEndList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermStartList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1ProofErrList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1SdtList', + 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1TblList', + 'org.osgi.framework.BundleActivator', + 'org.osgi.framework.BundleContext', + 'org.osgi.framework.ServiceReference', + 'org.osgi.framework.ServiceRegistration', + 'org.osgi.util.tracker.ServiceTracker', + 'org.osgi.util.tracker.ServiceTrackerCustomizer', + 'org.sqlite.SQLiteConfig', + 'org.tukaani.xz.ARMOptions', + 'org.tukaani.xz.ARMThumbOptions', + 'org.tukaani.xz.DeltaOptions', + 'org.tukaani.xz.FilterOptions', + 'org.tukaani.xz.FinishableWrapperOutputStream', + 'org.tukaani.xz.IA64Options', + 'org.tukaani.xz.LZMA2InputStream', + 'org.tukaani.xz.LZMA2Options', + 
'org.tukaani.xz.LZMAInputStream', + 'org.tukaani.xz.PowerPCOptions', + 'org.tukaani.xz.SPARCOptions', + 'org.tukaani.xz.SingleXZInputStream', + 'org.tukaani.xz.UnsupportedOptionsException', + 'org.tukaani.xz.X86Options', + 'org.tukaani.xz.XZ', + 'org.tukaani.xz.XZInputStream', + 'org.tukaani.xz.XZOutputStream', + 'org.w3.x2000.x09.xmldsig.KeyInfoType', + 'org.w3.x2000.x09.xmldsig.SignatureMethodType', + 'org.w3.x2000.x09.xmldsig.SignatureValueType', + 'org.w3.x2000.x09.xmldsig.TransformsType', + 'org.w3.x2000.x09.xmldsig.impl.SignatureTypeImpl$1ObjectList', + 'org.w3.x2000.x09.xmldsig.impl.SignedInfoTypeImpl$1ReferenceList', + 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$1XPathList', + 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$2XPathList', + 'schemasMicrosoftComOfficeExcel.STCF', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1Accel2List', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AccelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AnchorList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoFillList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoLineList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoPictList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoScaleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CFList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CameraList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CancelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CheckedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColHiddenList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColoredList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColumnList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DDEList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DefaultList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DefaultSizeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DisabledList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DismissList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DropLinesList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DropStyleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FirstButtonList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaGroupList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaLinkList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaMacroList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaPictList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaRangeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaTxbxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1HelpList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1HorizList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1IncList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1JustLastXList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LCTList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ListItemList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LockTextList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LockedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MapOCXList', + 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MaxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MinList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MoveWithCellsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MultiLineList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MultiSelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1NoThreeD2List', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1NoThreeDList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1PageList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1PrintObjectList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RecalcAlwaysList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RowHiddenList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RowList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptExtendedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptLanguageList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptLocationList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptTextList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SecretEditList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SelTypeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SizeWithCellsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1TextHAlignList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1TextVAlignList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1UIObjList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VScrollList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VTEditList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ValList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ValidIdsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VisibleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1WidthMinList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2Accel2List', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AccelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AnchorList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoFillList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoLineList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoPictList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoScaleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CFList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CameraList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CancelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CheckedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColHiddenList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColoredList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColumnList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DDEList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DefaultList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DefaultSizeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DisabledList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DismissList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DropLinesList', + 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DropStyleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FirstButtonList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaGroupList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaLinkList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaMacroList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaPictList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaRangeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaTxbxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2HelpList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2HorizList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2IncList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2JustLastXList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LCTList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ListItemList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LockTextList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LockedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MapOCXList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MaxList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MinList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MoveWithCellsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MultiLineList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MultiSelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2NoThreeD2List', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2NoThreeDList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2PageList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2PrintObjectList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RecalcAlwaysList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RowHiddenList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RowList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptExtendedList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptLanguageList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptLocationList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptTextList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SecretEditList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SelList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SelTypeList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SizeWithCellsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2TextHAlignList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2TextVAlignList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2UIObjList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VScrollList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VTEditList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ValList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ValidIdsList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VisibleList', + 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2WidthMinList', + 'schemasMicrosoftComOfficeOffice.CTCallout', + 'schemasMicrosoftComOfficeOffice.CTClipPath', + 'schemasMicrosoftComOfficeOffice.CTComplex', + 
'schemasMicrosoftComOfficeOffice.CTDiagram', + 'schemasMicrosoftComOfficeOffice.CTExtrusion', + 'schemasMicrosoftComOfficeOffice.CTFill', + 'schemasMicrosoftComOfficeOffice.CTInk', + 'schemasMicrosoftComOfficeOffice.CTRegroupTable', + 'schemasMicrosoftComOfficeOffice.CTRules', + 'schemasMicrosoftComOfficeOffice.CTSignatureLine', + 'schemasMicrosoftComOfficeOffice.CTSkew', + 'schemasMicrosoftComOfficeOffice.CTStrokeChild', + 'schemasMicrosoftComOfficeOffice.STBWMode', + 'schemasMicrosoftComOfficeOffice.STConnectorType', + 'schemasMicrosoftComOfficeOffice.STHrAlign', + 'schemasMicrosoftComOfficeOffice.STRelationshipId', + 'schemasMicrosoftComOfficeOffice.STTrueFalse', + 'schemasMicrosoftComOfficeOffice.STTrueFalseBlank', + 'schemasMicrosoftComOfficePowerpoint.CTEmpty', + 'schemasMicrosoftComOfficePowerpoint.CTRel', + 'schemasMicrosoftComOfficeWord.CTAnchorLock', + 'schemasMicrosoftComOfficeWord.CTBorder', + 'schemasMicrosoftComOfficeWord.CTWrap', + 'schemasMicrosoftComVml.CTArc', + 'schemasMicrosoftComVml.CTCurve', + 'schemasMicrosoftComVml.CTImage', + 'schemasMicrosoftComVml.CTImageData', + 'schemasMicrosoftComVml.CTLine', + 'schemasMicrosoftComVml.CTOval', + 'schemasMicrosoftComVml.CTPolyLine', + 'schemasMicrosoftComVml.CTRect', + 'schemasMicrosoftComVml.CTRoundRect', + 'schemasMicrosoftComVml.STEditAs', + 'schemasMicrosoftComVml.STFillMethod', + 'schemasMicrosoftComVml.STFillType', + 'schemasMicrosoftComVml.STImageAspect', + 'schemasMicrosoftComVml.STShadowType', + 'schemasMicrosoftComVml.STStrokeArrowLength', + 'schemasMicrosoftComVml.STStrokeArrowType', + 'schemasMicrosoftComVml.STStrokeArrowWidth', + 'schemasMicrosoftComVml.STStrokeEndCap', + 'schemasMicrosoftComVml.STStrokeLineStyle', + 'schemasMicrosoftComVml.STTrueFalseBlank', + 'schemasMicrosoftComVml.impl.CTFormulasImpl$1FList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1AnchorlockList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ArcList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderbottomList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderleftList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderrightList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1BordertopList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1CalloutList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ClientDataList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ClippathList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1CurveList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1DiagramList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ExtrusionList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1FillList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1FormulasList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1GroupList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1HandlesList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ImageList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ImagedataList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1LineList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1LockList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1OvalList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1PathList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1PolylineList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1RectList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1RoundrectList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShadowList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShapeList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShapetypeList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1SignaturelineList', + 
'schemasMicrosoftComVml.impl.CTGroupImpl$1SkewList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1StrokeList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextboxList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextdataList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextpathList', + 'schemasMicrosoftComVml.impl.CTGroupImpl$1WrapList', + 'schemasMicrosoftComVml.impl.CTHandlesImpl$1HList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1AnchorlockList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderbottomList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderleftList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderrightList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1BordertopList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1CalloutList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1ClippathList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1ExtrusionList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1FillList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1FormulasList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1HandlesList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1ImagedataList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1InkList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1IscommentList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1LockList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1PathList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1ShadowList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1SignaturelineList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1SkewList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1StrokeList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextboxList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextdataList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextpathList', + 'schemasMicrosoftComVml.impl.CTShapeImpl$1WrapList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1AnchorlockList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderbottomList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderleftList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderrightList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BordertopList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1CalloutList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ClientDataList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ClippathList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ExtrusionList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1FillList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1FormulasList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1HandlesList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ImagedataList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1LockList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1PathList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ShadowList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1SignaturelineList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1SkewList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1StrokeList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextboxList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextdataList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextpathList', + 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1WrapList', + 'ucar.ma2.DataType', + 'ucar.nc2.Attribute', + 'ucar.nc2.Dimension', + 'ucar.nc2.Group', + 'ucar.nc2.NetcdfFile', + 'ucar.nc2.Variable', + 'ucar.nc2.dataset.NetcdfDataset', +] diff --git a/plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1 b/plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1 
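For context, the closing `]` above ends a very long Groovy list literal added to the mapper-attachments plugin's build.gradle. A minimal sketch of how such a list is typically consumed, assuming (as the surrounding build logic suggests, though this exact assignment is not shown in the excerpt) that it feeds the third-party-audit task's `excludes` property; the two entries below are placeholders drawn from the full list:

  // Classes that the plugin's bundled third-party jars reference but that are
  // deliberately absent from its classpath; listing them tells the audit task
  // not to fail the build over the unresolved references.
  thirdPartyAudit.excludes = [
    'org.osgi.framework.BundleActivator',
    'ucar.nc2.NetcdfFile',
  ]
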
diff --git a/plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1 b/plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1
deleted file mode 100644
index 4426e34685d..00000000000
--- a/plugins/mapper-attachments/licenses/stax-api-1.0.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49c100caf72d658aca8e58bd74a4ba90fa2b0d70
\ No newline at end of file
diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java
index eb0e143c946..d43b5df7e4a 100644
--- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java
+++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java
@@ -30,14 +30,23 @@ import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.ParseContext;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
-import static org.elasticsearch.index.mapper.MapperBuilders.*;
+import static org.elasticsearch.index.mapper.MapperBuilders.dateField;
+import static org.elasticsearch.index.mapper.MapperBuilders.integerField;
+import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;
 
 /**
  *

      @@ -65,7 +74,6 @@ public class AttachmentMapper extends FieldMapper {
           public static final String CONTENT_TYPE = "attachment";
       
           public static class Defaults {
      -        public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
       
               public static final AttachmentFieldType FIELD_TYPE = new AttachmentFieldType();
               static {
      @@ -108,8 +116,6 @@ public class AttachmentMapper extends FieldMapper {
       
    public static class Builder extends FieldMapper.Builder<Builder, AttachmentMapper> {
       
      -        private ContentPath.Type pathType = Defaults.PATH_TYPE;
      -
               private Boolean ignoreErrors = null;
       
               private Integer defaultIndexedChars = null;
      @@ -135,16 +141,11 @@ public class AttachmentMapper extends FieldMapper {
               private Mapper.Builder languageBuilder = stringField(FieldNames.LANGUAGE);
       
               public Builder(String name) {
      -            super(name, new AttachmentFieldType());
      +            super(name, new AttachmentFieldType(), new AttachmentFieldType());
                   this.builder = this;
                   this.contentBuilder = stringField(FieldNames.CONTENT);
               }
       
      -        public Builder pathType(ContentPath.Type pathType) {
      -            this.pathType = pathType;
      -            return this;
      -        }
      -
               public Builder content(Mapper.Builder content) {
                   this.contentBuilder = content;
                   return this;
      @@ -192,8 +193,6 @@ public class AttachmentMapper extends FieldMapper {
       
               @Override
               public AttachmentMapper build(BuilderContext context) {
      -            ContentPath.Type origPathType = context.path().pathType();
      -            context.path().pathType(pathType);
       
                   FieldMapper contentMapper;
                   if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
      @@ -220,8 +219,6 @@ public class AttachmentMapper extends FieldMapper {
                   FieldMapper language = (FieldMapper) languageBuilder.build(context);
                   context.path().remove();
       
      -            context.path().pathType(origPathType);
      -
                   if (defaultIndexedChars == null && context.indexSettings() != null) {
                       defaultIndexedChars = context.indexSettings().getAsInt("index.mapping.attachment.indexed_chars", 100000);
                   }
      @@ -257,7 +254,7 @@ public class AttachmentMapper extends FieldMapper {
       
                   defaultFieldType.freeze();
                   this.setupFieldType(context);
      -            return new AttachmentMapper(name, fieldType, defaultFieldType, pathType, defaultIndexedChars, ignoreErrors, langDetect, contentMapper,
      +            return new AttachmentMapper(name, fieldType, defaultFieldType, defaultIndexedChars, ignoreErrors, langDetect, contentMapper,
                           dateMapper, titleMapper, nameMapper, authorMapper, keywordsMapper, contentTypeMapper, contentLength,
                           language, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
               }
      @@ -309,10 +306,7 @@ public class AttachmentMapper extends FieldMapper {
                Map.Entry<String, Object> entry = iterator.next();
                       String fieldName = entry.getKey();
                       Object fieldNode = entry.getValue();
      -                if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
      -                    builder.pathType(parsePathType(name, fieldNode.toString()));
      -                    iterator.remove();
      -                } else if (fieldName.equals("fields")) {
      +                if (fieldName.equals("fields")) {
                    Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
                    for (Iterator<Map.Entry<String, Object>> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) {
                        Map.Entry<String, Object> entry1 = fieldsIterator.next();
      @@ -375,8 +369,6 @@ public class AttachmentMapper extends FieldMapper {
               }
           }
       
      -    private final ContentPath.Type pathType;
      -
           private final int defaultIndexedChars;
       
           private final boolean ignoreErrors;
      @@ -401,13 +393,12 @@ public class AttachmentMapper extends FieldMapper {
       
           private final FieldMapper languageMapper;
       
      -    public AttachmentMapper(String simpleName, MappedFieldType type, MappedFieldType defaultFieldType, ContentPath.Type pathType, int defaultIndexedChars, Boolean ignoreErrors,
      +    public AttachmentMapper(String simpleName, MappedFieldType type, MappedFieldType defaultFieldType, int defaultIndexedChars, Boolean ignoreErrors,
                                   Boolean defaultLangDetect, FieldMapper contentMapper,
                                   FieldMapper dateMapper, FieldMapper titleMapper, FieldMapper nameMapper, FieldMapper authorMapper,
                                   FieldMapper keywordsMapper, FieldMapper contentTypeMapper, FieldMapper contentLengthMapper,
                                   FieldMapper languageMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
               super(simpleName, type, defaultFieldType, indexSettings, multiFields, copyTo);
      -        this.pathType = pathType;
               this.defaultIndexedChars = defaultIndexedChars;
               this.ignoreErrors = ignoreErrors;
               this.defaultLangDetect = defaultLangDetect;
      @@ -602,7 +593,7 @@ public class AttachmentMapper extends FieldMapper {
           }
       
           @Override
      -    public void merge(Mapper mergeWith, MergeResult mergeResult) {
      +    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
               // ignore this for now
           }
       
      @@ -626,9 +617,6 @@ public class AttachmentMapper extends FieldMapper {
           public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
               builder.startObject(simpleName());
               builder.field("type", CONTENT_TYPE);
      -        if (indexCreatedBefore2x) {
      -            builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
      -        }
       
               builder.startObject("fields");
               contentMapper.toXContent(builder, params);
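
Across this file the old merge(Mapper, MergeResult) override becomes doMerge(Mapper, boolean): the old API collected conflicts into a MergeResult (with a simulate pass), while the new one applies changes directly and signals conflicts by throwing. A minimal sketch of the new hook's shape; MergeHookSketch is a hypothetical class, not part of this codebase:

    import org.elasticsearch.index.mapper.Mapper;

    abstract class MergeHookSketch {
        // old shape: public void merge(Mapper mergeWith, MergeResult mergeResult),
        // where mergeResult.simulate() gated whether anything was actually mutated
        protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
            // apply merged settings unconditionally; report an incompatible
            // mapping by throwing (e.g. IllegalArgumentException) instead of
            // collecting conflicts into a MergeResult
        }
    }
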
      diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java
      index 38e292725a5..fa9a2d06f8e 100644
      --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java
      +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java
      @@ -19,6 +19,16 @@ package org.elasticsearch.mapper.attachments;
        * under the License.
        */
       
      +import org.apache.tika.Tika;
      +import org.apache.tika.exception.TikaException;
      +import org.apache.tika.metadata.Metadata;
      +import org.apache.tika.parser.AutoDetectParser;
      +import org.apache.tika.parser.Parser;
      +import org.elasticsearch.SpecialPermission;
      +import org.elasticsearch.bootstrap.JarHell;
      +import org.elasticsearch.common.SuppressForbidden;
      +import org.elasticsearch.common.io.PathUtils;
      +
       import java.io.ByteArrayInputStream;
       import java.io.FilePermission;
       import java.io.IOException;
      @@ -37,16 +47,6 @@ import java.security.ProtectionDomain;
       import java.security.SecurityPermission;
       import java.util.PropertyPermission;
       
      -import org.apache.tika.Tika;
      -import org.apache.tika.exception.TikaException;
      -import org.apache.tika.metadata.Metadata;
      -import org.apache.tika.parser.AutoDetectParser;
      -import org.apache.tika.parser.Parser;
      -import org.elasticsearch.SpecialPermission;
      -import org.elasticsearch.bootstrap.JarHell;
      -import org.elasticsearch.common.SuppressForbidden;
      -import org.elasticsearch.common.io.PathUtils;
      -
       /**
        * Runs tika with limited parsers and limited permissions.
 * <p>

@@ -69,13 +69,13 @@ final class TikaImpl {
         new org.apache.tika.parser.xml.DcXMLParser(),
         new org.apache.tika.parser.epub.EpubParser(),
     };
-    
+
     /** autodetector based on this subset */
     private static final AutoDetectParser PARSER_INSTANCE = new AutoDetectParser(PARSERS);
-    
+
     /** singleton tika instance */
     private static final Tika TIKA_INSTANCE = new Tika(PARSER_INSTANCE.getDetector(), PARSER_INSTANCE);
-    
+
     /**
      * parses with tika, throwing any exception hit while parsing the document
      */
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java
index f93785ed14a..7b93dbc3155 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.mapper.attachments;
 
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.MapperTestUtils;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -43,7 +44,7 @@ public class DateAttachmentMapperTests extends AttachmentUnitTestCase {
 
     public void testSimpleMappings() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/date/date-mapping.json");
-        DocumentMapper docMapper = mapperParser.parse(mapping);
+        DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping));
 
         // Our mapping should be kept as a String
         assertThat(docMapper.mappers().getMapper("file.date"), instanceOf(StringFieldMapper.class));
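
Every test diff that follows repeats the same migration: DocumentMapperParser.parse now takes the mapping type explicitly plus the source wrapped in a CompressedXContent, instead of deriving the type from the source. A condensed sketch; ParseSketch is a hypothetical helper name:

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.DocumentMapperParser;

    final class ParseSketch {
        static DocumentMapper parse(DocumentMapperParser mapperParser, String mapping) throws Exception {
            // before: mapperParser.parse(mapping) -- the type name was read out of the source
            // after: the caller names the type and hands over the compressed source
            return mapperParser.parse("person", new CompressedXContent(mapping));
        }
    }
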
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java
index 10e82e24c84..21627daeb53 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java
@@ -20,6 +20,7 @@ package org.elasticsearch.mapper.attachments;
 
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.MapperTestUtils;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -32,7 +33,11 @@ import java.io.IOException;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.startsWith;
 
 /**
  * Test for https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/18
@@ -45,7 +50,7 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase {
         DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY,
             getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
-        DocumentMapper docMapper = mapperParser.parse(mapping);
+        DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping));
 
         byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html");
         byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/encrypted.pdf");
@@ -56,25 +61,25 @@
             .endObject().bytes();
 
         ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc();
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.content").fieldType().names().indexName()), containsString("World"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().names().indexName()), equalTo("Hello"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().names().indexName()), equalTo("kimchy"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().names().indexName()), startsWith("text/html;"));
-        assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.content").fieldType().name()), containsString("World"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().name()), equalTo("Hello"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().name()), equalTo("kimchy"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().name()), equalTo("elasticsearch,cool,bonsai"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().name()), startsWith("text/html;"));
+        assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().name()).numericValue().longValue(), greaterThan(0L));
 
-        assertThat(doc.get(docMapper.mappers().getMapper("file2").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().names().indexName()), nullValue());
-        assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().name()), nullValue());
+        assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().name()), nullValue());
     }
 
     public void testMultipleDocsEncryptedFirst() throws IOException {
         DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY,
             getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
-        DocumentMapper docMapper = mapperParser.parse(mapping);
+        DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping));
 
         byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html");
         byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/encrypted.pdf");
@@ -85,19 +90,19 @@
             .endObject().bytes();
 
         ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc();
-        assertThat(doc.get(docMapper.mappers().getMapper("file1").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().names().indexName()), nullValue());
-        assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().names().indexName()), nullValue());
-        assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().names().indexName()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().name()), nullValue());
+        assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().name()), nullValue());
+        assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().name()), nullValue());
 
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.content").fieldType().names().indexName()), containsString("World"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().names().indexName()), equalTo("Hello"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().names().indexName()), equalTo("kimchy"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai"));
-        assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().names().indexName()), startsWith("text/html;"));
-        assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.content").fieldType().name()), containsString("World"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().name()), equalTo("Hello"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().name()), equalTo("kimchy"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().name()), equalTo("elasticsearch,cool,bonsai"));
+        assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().name()), startsWith("text/html;"));
+        assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().name()).numericValue().longValue(), greaterThan(0L));
     }
 
     public void testMultipleDocsEncryptedNotIgnoringErrors() throws IOException {
@@ -107,7 +112,7 @@
             getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
 
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
-        DocumentMapper docMapper = mapperParser.parse(mapping);
+        DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping));
 
         byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html");
         byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/encrypted.pdf");
assertThat(doc.get(docMapper.mappers().getMapper("file.language").fieldType().name()), equalTo("en")); } } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java index bdbafea710a..1eecda65a05 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java @@ -22,7 +22,6 @@ package org.elasticsearch.mapper.attachments; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.mapper.attachments.MapperAttachmentsPlugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java index acf0163acd9..b44a6d55eb9 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; @@ -32,7 +33,12 @@ import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; /** * Test for https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/38 @@ -47,7 +53,7 @@ public class MetadataMapperTests extends AttachmentUnitTestCase { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), settings, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/metadata/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/" + filename); BytesReference json = jsonBuilder() @@ -59,21 +65,21 @@ public class MetadataMapperTests extends AttachmentUnitTestCase { .endObject().bytes(); ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc(); - 
assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("World")); - assertThat(doc.get(docMapper.mappers().getMapper("file.name").fieldType().names().indexName()), equalTo(filename)); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("World")); + assertThat(doc.get(docMapper.mappers().getMapper("file.name").fieldType().name()), equalTo(filename)); if (expectedDate == null) { - assertThat(doc.getField(docMapper.mappers().getMapper("file.date").fieldType().names().indexName()), nullValue()); + assertThat(doc.getField(docMapper.mappers().getMapper("file.date").fieldType().name()), nullValue()); } else { - assertThat(doc.getField(docMapper.mappers().getMapper("file.date").fieldType().names().indexName()).numericValue().longValue(), is(expectedDate)); + assertThat(doc.getField(docMapper.mappers().getMapper("file.date").fieldType().name()).numericValue().longValue(), is(expectedDate)); } - assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("Hello")); - assertThat(doc.get(docMapper.mappers().getMapper("file.author").fieldType().names().indexName()), equalTo("kimchy")); - assertThat(doc.get(docMapper.mappers().getMapper("file.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("text/html;")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("Hello")); + assertThat(doc.get(docMapper.mappers().getMapper("file.author").fieldType().name()), equalTo("kimchy")); + assertThat(doc.get(docMapper.mappers().getMapper("file.keywords").fieldType().name()), equalTo("elasticsearch,cool,bonsai")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("text/html;")); if (expectedLength == null) { - assertNull(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().names().indexName()).numericValue().longValue()); + assertNull(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().name()).numericValue().longValue()); } else { - assertThat(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L)); + assertThat(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().name()).numericValue().longValue(), greaterThan(0L)); } } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java index 40593ddb1bb..9e756796352 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.Base64; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.MapperTestUtils; @@ -36,7 +37,10 @@ import org.junit.Before; import java.nio.charset.StandardCharsets; import static 
org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; /** * @@ -59,7 +63,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { public void testSimpleMappings() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); assertThat(docMapper.mappers().getMapper("file.content"), instanceOf(StringFieldMapper.class)); @@ -95,7 +99,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json"); - DocumentMapper documentMapper = mapperService.documentMapperParser().parse(mapping); + DocumentMapper documentMapper = mapperService.documentMapperParser().parse("person", new CompressedXContent(mapping)); ParsedDocument doc = documentMapper.parse("person", "person", "1", XContentFactory.jsonBuilder() .startObject() diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java index 01e87dc1430..fd5f480700c 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.mapper.attachments; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -30,12 +28,13 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; -import org.junit.Test; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; /** * @@ -45,41 +44,27 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { public void testSimpleMappings() throws Exception { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = 
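
The simple-mapper tests that follow share a round-trip pattern: parse a mapping, serialize it back out, and re-parse the serialized form to prove the mapping is stable. Condensed into a hypothetical helper, mirroring the calls in the diff:

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.DocumentMapperParser;

    final class RoundTripSketch {
        static DocumentMapper reparse(DocumentMapperParser mapperParser, DocumentMapper docMapper) throws Exception {
            // serialize the live mapping and feed it straight back into the parser
            String builtMapping = docMapper.mappingSource().string();
            return mapperParser.parse("person", new CompressedXContent(builtMapping));
        }
    }
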
copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes(); ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("application/xhtml+xml")); - assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("XHTML test document")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("application/xhtml+xml")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("XHTML test document")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("This document tests the ability of Apache Tika to extract content")); // re-parse it String builtMapping = docMapper.mappingSource().string(); - docMapper = mapperParser.parse(builtMapping); + docMapper = mapperParser.parse("person", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("file", html).endObject().bytes(); doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("application/xhtml+xml")); - assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("XHTML test document")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); - } - - public void testContentBackcompat() throws Exception { - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(), - getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); - byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); - - BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes(); - - ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get("file"), containsString("This document tests the ability of Apache Tika to extract content")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("application/xhtml+xml")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("XHTML test document")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("This document tests the ability of Apache Tika to extract content")); } /** @@ -88,27 +73,27 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { public void testSimpleMappingsWithAllFields() throws Exception { DocumentMapperParser mapperParser = 
MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json"); - DocumentMapper docMapper = mapperParser.parse(mapping); + DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); BytesReference json = jsonBuilder().startObject().field("file", html).endObject().bytes(); ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("application/xhtml+xml")); - assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("XHTML test document")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("application/xhtml+xml")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("XHTML test document")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("This document tests the ability of Apache Tika to extract content")); // re-parse it String builtMapping = docMapper.mappingSource().string(); - docMapper = mapperParser.parse(builtMapping); + docMapper = mapperParser.parse("person", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("file", html).endObject().bytes(); doc = docMapper.parse("person", "person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("application/xhtml+xml")); - assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("XHTML test document")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().name()), startsWith("application/xhtml+xml")); + assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().name()), equalTo("XHTML test document")); + assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), containsString("This document tests the ability of Apache Tika to extract content")); } /** diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java index fcd430d0fbc..217d48a8565 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliToolConfig; import org.elasticsearch.common.cli.Terminal; +import 
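
The deleted testContentBackcompat above exercised an index that claimed a pre-2.0 creation version; with the 2.x backcompat paths gone from AttachmentMapper, the test had nothing left to cover. For reference, the settings gate it relied on, exactly as in the removed lines:

    import org.elasticsearch.Version;
    import org.elasticsearch.cluster.metadata.IndexMetaData;
    import org.elasticsearch.common.settings.Settings;

    final class OldIndexSettingsSketch {
        // settings for an index that pretends to have been created on 1.4.2
        static Settings preTwoZero() {
            return Settings.builder()
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
                .build();
        }
    }
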
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java
index fcd430d0fbc..217d48a8565 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java
@@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.cli.CliTool;
 import org.elasticsearch.common.cli.CliToolConfig;
 import org.elasticsearch.common.cli.Terminal;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.settings.Settings;
@@ -92,7 +93,7 @@ public class StandaloneRunner extends CliTool {
             DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(PathUtils.get("."), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); // use CWD b/c it won't be used
 
             String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/standalone/standalone-mapping.json");
-            docMapper = mapperParser.parse(mapping);
+            docMapper = mapperParser.parse("person", new CompressedXContent(mapping));
         }
 
         @Override
@@ -134,7 +135,7 @@ public class StandaloneRunner extends CliTool {
         }
 
         private void printMetadataContent(ParseContext.Document doc, String field) {
-            terminal.println("- %s: %s", field, doc.get(docMapper.mappers().getMapper("file." + field).fieldType().names().indexName()));
+            terminal.println("- %s: %s", field, doc.get(docMapper.mappers().getMapper("file." + field).fieldType().name()));
         }
 
     public static byte[] copyToBytes(Path path) throws IOException {
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java
index a5e3ec9c17c..fbbdeb83a7d 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java
@@ -19,17 +19,16 @@ package org.elasticsearch.mapper.attachments;
  * under the License.
  */
 
+import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
+import org.apache.lucene.util.TestUtil;
+import org.apache.tika.metadata.Metadata;
+import org.elasticsearch.test.ESTestCase;
+
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 
-import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
-import org.apache.lucene.util.TestUtil;
-import org.apache.tika.metadata.Metadata;
-
-import org.elasticsearch.test.ESTestCase;
-
-/**
+/**
  * Evil test-coverage cheat, we parse a bunch of docs from tika
 * so that we have a nice grab-bag variety, and assert some content
 * comes back and no exception.
@@ -43,7 +42,7 @@ public class TikaDocTests extends ESTestCase {
     public void testFiles() throws Exception {
         Path tmp = createTempDir();
         TestUtil.unzip(getClass().getResourceAsStream(TIKA_FILES), tmp);
-        
+
         try (DirectoryStream<Path> stream = Files.newDirectoryStream(tmp)) {
             for (Path doc : stream) {
                 logger.debug("parsing: {}", doc);
@@ -51,7 +50,7 @@ public class TikaDocTests extends ESTestCase {
             }
         }
     }
-    
+
     void assertParseable(Path fileName) throws Exception {
         try {
             byte bytes[] = Files.readAllBytes(fileName);
@@ -60,7 +59,7 @@ public class TikaDocTests extends ESTestCase {
             assertFalse(parsedContent.isEmpty());
             logger.debug("extracted content: {}", parsedContent);
         } catch (Throwable e) {
-            throw new RuntimeException("parsing of filename: " + fileName.getFileName() + " failed", e); 
+            throw new RuntimeException("parsing of filename: " + fileName.getFileName() + " failed", e);
         }
     }
 }
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaImplTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaImplTests.java
index fc17d59603f..f42110c1e62 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaImplTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaImplTests.java
@@ -1,5 +1,24 @@
 package org.elasticsearch.mapper.attachments;
 
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 import org.elasticsearch.test.ESTestCase;
 
 public class TikaImplTests extends ESTestCase {
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java
index 5341e038cff..9475c85a5f4 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.mapper.attachments;
 import org.apache.tika.io.IOUtils;
 import org.apache.tika.metadata.Metadata;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.MapperTestUtils;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -33,7 +34,14 @@ import java.io.IOException;
 import java.io.InputStream;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.*;
+import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.AUTHOR;
+import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.CONTENT_LENGTH;
+import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.CONTENT_TYPE;
+import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.DATE;
+import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.KEYWORDS;
+import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.LANGUAGE;
+import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.NAME;
+import static org.elasticsearch.mapper.attachments.AttachmentMapper.FieldNames.TITLE;
 import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
 import static org.hamcrest.Matchers.isEmptyOrNullString;
@@ -51,7 +59,7 @@ public class VariousDocTests extends AttachmentUnitTestCase {
         DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/various-doc/test-mapping.json");
-        docMapper = mapperParser.parse(mapping);
+        docMapper = mapperParser.parse("person", new CompressedXContent(mapping));
     }
 
     /**
@@ -148,8 +156,8 @@ public class VariousDocTests extends AttachmentUnitTestCase {
         ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc();
         if (!errorExpected) {
-            assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), not(isEmptyOrNullString()));
-            logger.debug("-> extracted content: {}", doc.get(docMapper.mappers().getMapper("file").fieldType().names().indexName()));
+            assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().name()), not(isEmptyOrNullString()));
+            logger.debug("-> extracted content: {}", doc.get(docMapper.mappers().getMapper("file").fieldType().name()));
             logger.debug("-> extracted metadata:");
             printMetadataContent(doc, AUTHOR);
             printMetadataContent(doc, CONTENT_LENGTH);
@@ -163,6 +171,6 @@ public class VariousDocTests extends AttachmentUnitTestCase {
     }
 
     private void printMetadataContent(ParseContext.Document doc, String field) {
-        logger.debug("- [{}]: [{}]", field, doc.get(docMapper.mappers().getMapper("file." + field).fieldType().names().indexName()));
+        logger.debug("- [{}]: [{}]", field, doc.get(docMapper.mappers().getMapper("file." + field).fieldType().name()));
     }
 }
diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java
index 60c31c3f765..03b00d2ac39 100644
--- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java
+++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java
@@ -66,8 +66,7 @@ public class Murmur3FieldMapper extends LongFieldMapper {
             Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(name, fieldType, defaultFieldType,
                 ignoreMalformed(context), coerce(context), context.indexSettings(),
                 multiFieldsBuilder.build(this, context), copyTo);
-            fieldMapper.includeInAll(includeInAll);
-            return fieldMapper;
+            return (Murmur3FieldMapper) fieldMapper.includeInAll(includeInAll);
         }
 
         @Override
diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java
index 5dfc48570e6..97c5ad994a4 100644
--- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java
+++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java
@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.murmur3;
 
 import com.carrotsearch.randomizedtesting.annotations.Name;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.rest.ESRestTestCase;
diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
index da65210f6d9..603fcbbf820 100644
--- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
+++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
@@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
@@ -59,7 +60,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
             .startObject("properties").startObject("field")
                 .field("type", "murmur3")
             .endObject().endObject().endObject().endObject().string();
-        DocumentMapper mapper = parser.parse(mapping);
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         ParsedDocument parsedDoc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes());
         IndexableField[] fields = parsedDoc.rootDoc().getFields("field");
         assertNotNull(fields);
@@ -76,7 +77,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
                 .field("doc_values", false)
             .endObject().endObject().endObject().endObject().string();
         try {
-            parser.parse(mapping);
+            parser.parse("type", new CompressedXContent(mapping));
             fail("expected a mapper parsing exception");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage().contains("Setting [doc_values] cannot be modified"));
@@ -89,7 +90,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
                 .field("doc_values", true)
             .endObject().endObject().endObject().endObject().string();
         try {
-            parser.parse(mapping);
+            parser.parse("type", new CompressedXContent(mapping));
             fail("expected a mapper parsing exception");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage().contains("Setting [doc_values] cannot be modified"));
@@ -103,7 +104,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
                 .field("index", "not_analyzed")
             .endObject().endObject().endObject().endObject().string();
         try {
-            parser.parse(mapping);
+            parser.parse("type", new CompressedXContent(mapping));
             fail("expected a mapper parsing exception");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage().contains("Setting [index] cannot be modified"));
@@ -116,7 +117,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
                 .field("index", "no")
             .endObject().endObject().endObject().endObject().string();
         try {
-            parser.parse(mapping);
+            parser.parse("type", new CompressedXContent(mapping));
             fail("expected a mapper parsing exception");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage().contains("Setting [index] cannot be modified"));
@@ -134,7 +135,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
                 .field("doc_values", false)
             .endObject().endObject().endObject().endObject().string();
 
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
         Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field");
         assertFalse(mapper.fieldType().hasDocValues());
     }
@@ -150,7 +151,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
                 .field("index", "not_analyzed")
             .endObject().endObject().endObject().endObject().string();
 
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
         Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field");
         assertEquals(IndexOptions.DOCS, mapper.fieldType().indexOptions());
     }
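
For context, the murmur3 tests above build their mapping programmatically rather than loading it from a classpath resource. The same builder chain, condensed into a hypothetical helper:

    import org.elasticsearch.common.xcontent.XContentFactory;

    final class Murmur3MappingSketch {
        // a single field of type murmur3 under type "type", as used by the tests
        static String mapping() throws Exception {
            return XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field")
                    .field("type", "murmur3")
                .endObject().endObject()
            .endObject().endObject().string();
        }
    }
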
static { SIZE_FIELD_TYPE.setStored(true); SIZE_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_32_BIT); - SIZE_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + SIZE_FIELD_TYPE.setName(NAME); SIZE_FIELD_TYPE.setIndexAnalyzer(NumericIntegerAnalyzer.buildNamedAnalyzer(Defaults.PRECISION_STEP_32_BIT)); SIZE_FIELD_TYPE.setSearchAnalyzer(NumericIntegerAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE)); SIZE_FIELD_TYPE.freeze(); @@ -67,7 +66,7 @@ public class SizeFieldMapper extends MetadataFieldMapper { protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; public Builder(MappedFieldType existing) { - super(NAME, existing == null ? Defaults.SIZE_FIELD_TYPE : existing); + super(NAME, existing == null ? Defaults.SIZE_FIELD_TYPE : existing, Defaults.SIZE_FIELD_TYPE); builder = this; } @@ -162,27 +161,22 @@ public class SizeFieldMapper extends MetadataFieldMapper { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); // all are defaults, no need to write it at all - if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && (indexCreatedBefore2x == false || fieldType().stored() == false)) { + if (!includeDefaults && enabledState == Defaults.ENABLED_STATE) { return builder; } builder.startObject(contentType()); if (includeDefaults || enabledState != Defaults.ENABLED_STATE) { builder.field("enabled", enabledState.enabled); } - if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() == true)) { - builder.field("store", fieldType().stored()); - } builder.endObject(); return builder; } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) { + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { SizeFieldMapper sizeFieldMapperMergeWith = (SizeFieldMapper) mergeWith; - if (!mergeResult.simulate()) { - if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) { - this.enabledState = sizeFieldMapperMergeWith.enabledState; - } + if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) { + this.enabledState = sizeFieldMapperMergeWith.enabledState; } } } diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java index c267160eeb7..9899776f7dd 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.size; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java index 73f7a73547c..5bb251dca14 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java @@ -21,16 +21,13 @@ package org.elasticsearch.index.mapper.size; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import 
org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.junit.Test; import java.io.IOException; import java.util.Collection; -import java.util.Collections; import java.util.Locale; import java.util.Map; diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index e07b76bfc92..403eb284f96 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -19,48 +19,54 @@ package org.elasticsearch.index.mapper.size; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -import java.util.Collections; - import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + public class SizeMappingTests extends ESSingleNodeTestCase { - - MapperRegistry mapperRegistry; + IndexService indexService; + MapperService mapperService; DocumentMapperParser parser; @Before public void before() { indexService = createIndex("test"); - mapperRegistry = new MapperRegistry( - Collections.emptyMap(), - Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser())); - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.analysisService(), indexService.similarityService(), mapperRegistry); + Map<String, MetadataFieldMapper.TypeParser> metadataMappers = new HashMap<>(); + IndicesModule indices = new IndicesModule(); + indices.registerMetadataMapper(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser()); + mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry()); + parser = mapperService.documentMapperParser(); } public void testSizeEnabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled",
true).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() @@ -72,7 +78,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true)); assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); } - + public void testSizeEnabledAndStoredBackcompat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).field("store", "yes").endObject() @@ -80,12 +86,12 @@ public class SizeMappingTests extends ESSingleNodeTestCase { Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); indexService = createIndex("test2", indexSettings); - mapperRegistry = new MapperRegistry( + MapperRegistry mapperRegistry = new MapperRegistry( Collections.emptyMap(), Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser())); - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), + parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService, indexService.analysisService(), indexService.similarityService(), mapperRegistry); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() @@ -97,12 +103,12 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true)); assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); } - + public void testSizeDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() @@ -113,11 +119,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size"), nullValue()); } - + public void testSizeNotSet() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() .startObject() @@ -128,19 +134,18 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size"), nullValue()); } - + public void testThatDisablingWorksWhenMerging() throws Exception { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper enabledMapper = parser.parse(enabledMapping); + DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); String disabledMapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = parser.parse(disabledMapping); + DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); - enabledMapper.merge(disabledMapper.mapping(), false, false); - assertThat(enabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); + assertThat(disabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); } -} \ No newline at end of file +} diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java index 99a505c5666..cf97008249f 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java @@ -22,6 +22,7 @@ package org.elasticsearch.cloud.azure.blobstore; import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cloud.azure.storage.AzureStorageService; +import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; @@ -31,6 +32,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; +import org.elasticsearch.repositories.azure.AzureRepository.Defaults; import java.io.InputStream; import java.io.OutputStream; @@ -38,8 +40,7 @@ import java.net.URISyntaxException; import java.util.Locale; import java.util.Map; -import static org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage.CONTAINER; -import static org.elasticsearch.repositories.azure.AzureRepository.CONTAINER_DEFAULT; +import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getRepositorySettings; import static org.elasticsearch.repositories.azure.AzureRepository.Repository; public class AzureBlobStore extends AbstractComponent implements BlobStore { @@ -56,13 +57,13 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { AzureStorageService client) throws URISyntaxException, StorageException { super(settings); this.client = client.start(); - this.container = repositorySettings.settings().get("container", settings.get(CONTAINER, CONTAINER_DEFAULT)); + this.container = getRepositorySettings(repositorySettings, Repository.CONTAINER, Storage.CONTAINER, Defaults.CONTAINER); this.repositoryName = name.getName(); // NOTE: null account means to use the first one specified in config - this.accountName = repositorySettings.settings().get(Repository.ACCOUNT, null); + this.accountName = getRepositorySettings(repositorySettings, Repository.ACCOUNT, Storage.ACCOUNT, null); - String modeStr = repositorySettings.settings().get(Repository.LOCATION_MODE, null); + String modeStr = getRepositorySettings(repositorySettings, Repository.LOCATION_MODE, Storage.LOCATION_MODE, null); if (modeStr == null) { this.locMode = LocationMode.PRIMARY_ONLY; } else { diff --git 
a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index 5a551f54de3..9ed909c0b8f 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -19,8 +19,8 @@ package org.elasticsearch.cloud.azure.storage; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.common.blobstore.BlobMetaData; import java.io.InputStream; @@ -37,9 +37,14 @@ public interface AzureStorageService { final class Storage { public static final String PREFIX = "cloud.azure.storage."; @Deprecated - public static final String ACCOUNT = "cloud.azure.storage.account"; + public static final String ACCOUNT_DEPRECATED = "cloud.azure.storage.account"; @Deprecated - public static final String KEY = "cloud.azure.storage.key"; + public static final String KEY_DEPRECATED = "cloud.azure.storage.key"; + + public static final String TIMEOUT = "cloud.azure.storage.timeout"; + + public static final String ACCOUNT = "repositories.azure.account"; + public static final String LOCATION_MODE = "repositories.azure.location_mode"; public static final String CONTAINER = "repositories.azure.container"; public static final String BASE_PATH = "repositories.azure.base_path"; public static final String CHUNK_SIZE = "repositories.azure.chunk_size"; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index 56e75d7386c..8b453867de6 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -22,7 +22,11 @@ package org.elasticsearch.cloud.azure.storage; import com.microsoft.azure.storage.CloudStorageAccount; import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; -import com.microsoft.azure.storage.blob.*; +import com.microsoft.azure.storage.blob.BlobProperties; +import com.microsoft.azure.storage.blob.CloudBlobClient; +import com.microsoft.azure.storage.blob.CloudBlobContainer; +import com.microsoft.azure.storage.blob.CloudBlockBlob; +import com.microsoft.azure.storage.blob.ListBlobItem; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; @@ -47,7 +51,7 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent<AzureStorageService> final AzureStorageSettings primaryStorageSettings; final Map<String, AzureStorageSettings> secondariesStorageSettings; final Map<String, CloudBlobClient> clients; - + @Inject public AzureStorageServiceImpl(Settings settings) { super(settings); @@ -81,7 +85,7 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent<AzureStorageService> public Map<String, BlobMetaData> listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) throws URISyntaxException, StorageException { // NOTE: this should be here: if (prefix == null) prefix = ""; // however, this is really inefficient since deleteBlobsByPrefix
enumerates everything and // then does a prefix match on the result; it should just call listBlobsByPrefix with the prefix! - + logger.debug("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix); MapBuilder<String, BlobMetaData> blobsBuilder = MapBuilder.newMapBuilder(); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java index 7fd0312df29..c7380e2fd7f 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java @@ -24,6 +24,9 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.repositories.RepositorySettings; import java.util.HashMap; import java.util.Map; @@ -34,11 +37,13 @@ public class AzureStorageSettings { private String name; private String account; private String key; + private TimeValue timeout; - public AzureStorageSettings(String name, String account, String key) { + public AzureStorageSettings(String name, String account, String key, TimeValue timeout) { this.name = name; this.account = account; this.key = key; + this.timeout = timeout; } public String getName() { @@ -53,12 +58,17 @@ public class AzureStorageSettings { return account; } + public TimeValue getTimeout() { + return timeout; + } + @Override public String toString() { final StringBuffer sb = new StringBuffer("AzureStorageSettings{"); sb.append("name='").append(name).append('\''); sb.append(", account='").append(account).append('\''); sb.append(", key='").append(key).append('\''); + sb.append(", timeout=").append(timeout); sb.append('}'); return sb.toString(); } @@ -73,12 +83,15 @@ public class AzureStorageSettings { Map<String, AzureStorageSettings> secondaryStorage = new HashMap<>(); // We check for deprecated settings - String account = settings.get(Storage.ACCOUNT); - String key = settings.get(Storage.KEY); + String account = settings.get(Storage.ACCOUNT_DEPRECATED); + String key = settings.get(Storage.KEY_DEPRECATED); + + TimeValue globalTimeout = settings.getAsTime(Storage.TIMEOUT, TimeValue.timeValueMinutes(5)); + if (account != null) { logger.warn("[{}] and [{}] have been deprecated.
Use now [{}xxx.account] and [{}xxx.key] where xxx is any name", - Storage.ACCOUNT, Storage.KEY, Storage.PREFIX, Storage.PREFIX); - primaryStorage = new AzureStorageSettings(null, account, key); + Storage.ACCOUNT_DEPRECATED, Storage.KEY_DEPRECATED, Storage.PREFIX, Storage.PREFIX); + primaryStorage = new AzureStorageSettings(null, account, key, globalTimeout); } else { Settings storageSettings = settings.getByPrefix(Storage.PREFIX); if (storageSettings != null) { @@ -87,7 +100,8 @@ public class AzureStorageSettings { if (storage.getValue() instanceof Map) { @SuppressWarnings("unchecked") Map<String, String> map = (Map) storage.getValue(); - AzureStorageSettings current = new AzureStorageSettings(storage.getKey(), map.get("account"), map.get("key")); + TimeValue timeout = TimeValue.parseTimeValue(map.get("timeout"), globalTimeout, Storage.PREFIX + storage.getKey() + ".timeout"); + AzureStorageSettings current = new AzureStorageSettings(storage.getKey(), map.get("account"), map.get("key"), timeout); boolean activeByDefault = Boolean.parseBoolean(map.getOrDefault("default", "false")); if (activeByDefault) { if (primaryStorage == null) { @@ -119,4 +133,28 @@ public class AzureStorageSettings { return Tuple.tuple(primaryStorage, secondaryStorage); } + + public static String getRepositorySettings(RepositorySettings repositorySettings, + String repositorySettingName, + String repositoriesSettingName, + String defaultValue) { + return repositorySettings.settings().get(repositorySettingName, + repositorySettings.globalSettings().get(repositoriesSettingName, defaultValue)); + } + + public static ByteSizeValue getRepositorySettingsAsBytesSize(RepositorySettings repositorySettings, + String repositorySettingName, + String repositoriesSettingName, + ByteSizeValue defaultValue) { + return repositorySettings.settings().getAsBytesSize(repositorySettingName, + repositorySettings.globalSettings().getAsBytesSize(repositoriesSettingName, defaultValue)); + } + + public static Boolean getRepositorySettingsAsBoolean(RepositorySettings repositorySettings, + String repositorySettingName, + String repositoriesSettingName, + Boolean defaultValue) { + return repositorySettings.settings().getAsBoolean(repositorySettingName, + repositorySettings.globalSettings().getAsBoolean(repositoriesSettingName, defaultValue)); + } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilter.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilter.java index c061d262f0b..2c4e7957af3 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilter.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilter.java @@ -19,6 +19,7 @@ package org.elasticsearch.cloud.azure.storage; +import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -30,6 +31,8 @@ public class AzureStorageSettingsFilter extends AbstractComponent { public AzureStorageSettingsFilter(Settings settings, SettingsFilter settingsFilter) { super(settings); // Cloud storage API settings needed to be hidden - settingsFilter.addFilter("cloud.azure.storage.*"); + settingsFilter.addFilter(Storage.PREFIX + "*.account"); + settingsFilter.addFilter(Storage.PREFIX + "*.key"); + 
settingsFilter.addFilter(Storage.ACCOUNT); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 829ccb7e95e..a3abf9b4adf 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -19,8 +19,8 @@ package org.elasticsearch.repositories.azure; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cloud.azure.blobstore.AzureBlobStore; import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.cluster.metadata.MetaData; @@ -43,6 +43,10 @@ import java.net.URISyntaxException; import java.util.List; import java.util.Locale; +import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getRepositorySettings; +import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getRepositorySettingsAsBoolean; +import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getRepositorySettingsAsBytesSize; + /** * Azure file system implementation of the BlobStoreRepository *
 */
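The getRepositorySettings/getRepositorySettingsAsBytesSize/getRepositorySettingsAsBoolean helpers added above, and used by AzureRepository below, all implement the same two-level fallback: a setting supplied with the repository definition wins, then the node-level "repositories.azure.*" setting from elasticsearch.yml, then a hard-coded default (the per-client "timeout" follows the same pattern against the global "cloud.azure.storage.timeout"). A minimal sketch of that lookup order, with plain maps standing in for Elasticsearch's Settings API — class and method names here are illustrative only, not from this change:

---------------------------------------------------------------------------
import java.util.HashMap;
import java.util.Map;

// Illustrative only: plain maps stand in for RepositorySettings.settings()
// (per-repository values) and RepositorySettings.globalSettings() (node level).
public class RepositorySettingsFallbackSketch {

    static String get(Map<String, String> repositorySettings, Map<String, String> nodeSettings,
                      String repositoryKey, String nodeKey, String defaultValue) {
        // 1. a setting on the repository itself wins
        String value = repositorySettings.get(repositoryKey);
        if (value != null) {
            return value;
        }
        // 2. otherwise the node-level "repositories.azure.*" setting
        // 3. otherwise the hard-coded default
        return nodeSettings.getOrDefault(nodeKey, defaultValue);
    }

    public static void main(String[] args) {
        Map<String, String> repo = new HashMap<>();
        Map<String, String> node = new HashMap<>();
        node.put("repositories.azure.container", "from-elasticsearch-yml");

        // falls back to the node-level value
        System.out.println(get(repo, node, "container", "repositories.azure.container", "elasticsearch-snapshots"));
        // the per-repository value takes precedence once set
        repo.put("container", "my-container");
        System.out.println(get(repo, node, "container", "repositories.azure.container", "elasticsearch-snapshots"));
    }
}
---------------------------------------------------------------------------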
      @@ -57,7 +61,13 @@ import java.util.Locale; public class AzureRepository extends BlobStoreRepository { public final static String TYPE = "azure"; - public final static String CONTAINER_DEFAULT = "elasticsearch-snapshots"; + + static public final class Defaults { + public static final String CONTAINER = "elasticsearch-snapshots"; + public static final ByteSizeValue CHUNK_SIZE = new ByteSizeValue(64, ByteSizeUnit.MB); + public static final Boolean COMPRESS = false; + } + static public final class Repository { public static final String ACCOUNT = "account"; @@ -83,21 +93,18 @@ public class AzureRepository extends BlobStoreRepository { AzureBlobStore azureBlobStore) throws IOException, URISyntaxException, StorageException { super(name.getName(), repositorySettings, indexShardRepository); - String container = repositorySettings.settings().get(Repository.CONTAINER, - settings.get(Storage.CONTAINER, CONTAINER_DEFAULT)); + String container = getRepositorySettings(repositorySettings, Repository.CONTAINER, Storage.CONTAINER, Defaults.CONTAINER); this.blobStore = azureBlobStore; - this.chunkSize = repositorySettings.settings().getAsBytesSize(Repository.CHUNK_SIZE, - settings.getAsBytesSize(Storage.CHUNK_SIZE, new ByteSizeValue(64, ByteSizeUnit.MB))); + this.chunkSize = getRepositorySettingsAsBytesSize(repositorySettings, Repository.CHUNK_SIZE, Storage.CHUNK_SIZE, Defaults.CHUNK_SIZE); if (this.chunkSize.getMb() > 64) { logger.warn("azure repository does not support yet size > 64mb. Fall back to 64mb."); this.chunkSize = new ByteSizeValue(64, ByteSizeUnit.MB); } - this.compress = repositorySettings.settings().getAsBoolean(Repository.COMPRESS, - settings.getAsBoolean(Storage.COMPRESS, false)); - String modeStr = repositorySettings.settings().get(Repository.LOCATION_MODE, null); + this.compress = getRepositorySettingsAsBoolean(repositorySettings, Repository.COMPRESS, Storage.COMPRESS, Defaults.COMPRESS); + String modeStr = getRepositorySettings(repositorySettings, Repository.LOCATION_MODE, Storage.LOCATION_MODE, null); if (modeStr != null) { LocationMode locationMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); if (locationMode == LocationMode.SECONDARY_ONLY) { @@ -109,7 +116,7 @@ public class AzureRepository extends BlobStoreRepository { readonly = false; } - String basePath = repositorySettings.settings().get(Repository.BASE_PATH, null); + String basePath = getRepositorySettings(repositorySettings, Repository.BASE_PATH, Storage.BASE_PATH, null); if (Strings.hasLength(basePath)) { // Remove starting / if any diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java index 8a17f83d92d..b3e878927e9 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceTestCase.java @@ -19,8 +19,8 @@ package org.elasticsearch.cloud.azure; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cloud.azure.storage.AzureStorageService; import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.cloud.azure.storage.AzureStorageServiceMock; @@ -81,8 +81,8 @@ public abstract class 
AbstractAzureRepositoryServiceTestCase extends AbstractAzu .put(Storage.CONTAINER, "snapshots"); // We use sometime deprecated settings in tests - builder.put(Storage.ACCOUNT, "mock_azure_account") - .put(Storage.KEY, "mock_azure_key"); + builder.put(Storage.ACCOUNT_DEPRECATED, "mock_azure_account") + .put(Storage.KEY_DEPRECATED, "mock_azure_key"); return builder.build(); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java index 90e44d97a3f..5a1c76df413 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java @@ -19,8 +19,8 @@ package org.elasticsearch.cloud.azure.storage; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java index fbf4abc06e5..0c195f04cf5 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java @@ -37,6 +37,7 @@ public class AzureStorageServiceTest extends ESTestCase { .put("cloud.azure.storage.azure2.key", "mykey2") .put("cloud.azure.storage.azure3.account", "myaccount3") .put("cloud.azure.storage.azure3.key", "mykey3") + .put("cloud.azure.storage.azure3.timeout", "30s") .build(); public void testGetSelectedClientWithNoPrimaryAndSecondary() { @@ -89,6 +90,28 @@ public class AzureStorageServiceTest extends ESTestCase { assertThat(client.getEndpoint(), is(URI.create("https://azure1"))); } + public void testGetSelectedClientGlobalTimeout() { + Settings timeoutSettings = Settings.builder() + .put(settings) + .put("cloud.azure.storage.timeout", "10s") + .build(); + + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(timeoutSettings); + azureStorageService.doStart(); + CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(10 * 1000)); + CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY); + assertThat(client3.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(30 * 1000)); + } + + public void testGetSelectedClientDefaultTimeout() { + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings); + azureStorageService.doStart(); + CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(5 * 60 * 1000)); + CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY); + assertThat(client3.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(30 * 1000)); + } /** * This internal class just 
overloads the createClient method which is called by AzureStorageServiceImpl.doStart() diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTest.java index bbffba492c4..eaaf9c224d8 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTest.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTest.java @@ -29,8 +29,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import java.io.IOException; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.contains; public class AzureStorageSettingsFilterTest extends ESTestCase { final static Settings settings = Settings.builder() @@ -52,7 +51,7 @@ public class AzureStorageSettingsFilterTest extends ESTestCase { // Test using direct filtering Settings filteredSettings = SettingsFilter.filterSettings(settingsFilter.getPatterns(), settings); - assertThat(filteredSettings.getAsMap().keySet(), is(empty())); + assertThat(filteredSettings.getAsMap().keySet(), contains("cloud.azure.storage.azure1.default")); // Test using toXContent filtering RestRequest request = new FakeRestRequest(); @@ -63,7 +62,7 @@ public class AzureStorageSettingsFilterTest extends ESTestCase { xContentBuilder.endObject(); String filteredSettingsString = xContentBuilder.string(); filteredSettings = Settings.builder().loadFromSource(filteredSettingsString).build(); - assertThat(filteredSettings.getAsMap().keySet(), is(empty())); + assertThat(filteredSettings.getAsMap().keySet(), contains("cloud.azure.storage.azure1.default")); } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java index 17c0b01850f..59e8b8945c0 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTest.java @@ -27,7 +27,10 @@ import org.elasticsearch.common.settings.Settings; import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class AzureSettingsParserTest extends LuceneTestCase { @@ -65,8 +68,8 @@ public class AzureSettingsParserTest extends LuceneTestCase { public void testDeprecatedSettings() { Settings settings = Settings.builder() - .put(Storage.ACCOUNT, "myaccount1") - .put(Storage.KEY, "mykey1") + .put(Storage.ACCOUNT_DEPRECATED, "myaccount1") + .put(Storage.KEY_DEPRECATED, "mykey1") .build(); Tuple<AzureStorageSettings, Map<String, AzureStorageSettings>> tuple = AzureStorageSettings.parse(settings); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java index 7e4285829a8..cec7361de0a 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java +++
b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java @@ -21,9 +21,8 @@ package org.elasticsearch.repositories.azure; import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.LocationMode; - +import com.microsoft.azure.storage.StorageException; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 8f18f67f70d..68ab6f56ddb 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -17,187 +17,346 @@ * under the License. */ -//apply plugin: 'nebula.provided-base' +import org.apache.tools.ant.taskdefs.condition.Os +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths esplugin { description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' - classname 'org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin' -} - -configurations { - hadoop1 - hadoop2 + classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin' } versions << [ - 'hadoop1': '1.2.1', 'hadoop2': '2.7.1' ] -dependencies { - provided "org.elasticsearch:elasticsearch:${versions.elasticsearch}" - provided "org.apache.hadoop:hadoop-core:${versions.hadoop1}" - - // use Hadoop1 to compile and test things (a subset of Hadoop2) - testCompile "org.apache.hadoop:hadoop-core:${versions.hadoop1}" - testCompile "org.apache.hadoop:hadoop-test:${versions.hadoop1}" - // Hadoop dependencies - testCompile "commons-configuration:commons-configuration:1.6" - testCompile "commons-lang:commons-lang:${versions.commonslang}" - testCompile "commons-collections:commons-collections:3.2.2" - testCompile "commons-net:commons-net:1.4.1" - testCompile "org.mortbay.jetty:jetty:6.1.26" - testCompile "org.mortbay.jetty:jetty-util:6.1.26" - testCompile "org.mortbay.jetty:servlet-api:2.5-20081211" - testCompile "com.sun.jersey:jersey-core:1.8" - - - hadoop1("org.apache.hadoop:hadoop-core:${versions.hadoop1}") { - exclude module: "commons-cli" - exclude group: "com.sun.jersey" - exclude group: "org.mortbay.jetty" - exclude group: "tomcat" - exclude module: "commons-el" - exclude module: "hsqldb" - exclude group: "org.eclipse.jdt" - exclude module: "commons-beanutils" - exclude module: "commons-beanutils-core" - exclude module: "junit" - // provided by ES itself - exclude group: "log4j" - } - - hadoop2("org.apache.hadoop:hadoop-client:${versions.hadoop2}") { - exclude module: "commons-cli" - exclude group: "com.sun.jersey" - exclude group: "com.sun.jersey.contribs" - exclude group: "com.sun.jersey.jersey-test-framework" - exclude module: "guice" - exclude group: "org.mortbay.jetty" - exclude group: "tomcat" - exclude module: "commons-el" - exclude module: "hsqldb" - exclude group: "org.eclipse.jdt" - exclude module: "commons-beanutils" - exclude module: "commons-beanutils-core" - exclude module: "javax.servlet" - exclude module: "junit" - // provided by ES itself - exclude group: "log4j" - } - - hadoop2("org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}") { - exclude module: "guava" - exclude module: "junit" - // provided by ES itself - exclude group: "log4j" - } -} - -configurations.all { - resolutionStrategy { - force 
"commons-codec:commons-codec:${versions.commonscodec}" - force "commons-logging:commons-logging:${versions.commonslogging}" - force "commons-lang:commons-lang:2.6" - force "commons-httpclient:commons-httpclient:3.0.1" - force "org.codehaus.jackson:jackson-core-asl:1.8.8" - force "org.codehaus.jackson:jackson-mapper-asl:1.8.8" - force "com.google.code.findbugs:jsr305:3.0.0" - force "com.google.guava:guava:16.0.1" - force "org.slf4j:slf4j-api:1.7.10" - force "org.slf4j:slf4j-log4j12:1.7.10" - } +configurations { + hdfsFixture } +dependencies { + compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}" + compile 'org.apache.htrace:htrace-core:3.1.0-incubating' + compile 'com.google.guava:guava:16.0.1' + compile 'com.google.protobuf:protobuf-java:2.5.0' + compile 'commons-logging:commons-logging:1.1.3' + compile 'commons-collections:commons-collections:3.2.2' + compile 'commons-configuration:commons-configuration:1.6' + compile 'commons-io:commons-io:2.4' + compile 'commons-lang:commons-lang:2.6' + compile 'javax.servlet:servlet-api:2.5' + compile "org.slf4j:slf4j-api:${versions.slf4j}" + + hdfsFixture project(':test:fixtures:hdfs-fixture') +} dependencyLicenses { - mapping from: /hadoop-core.*/, to: 'hadoop-1' - mapping from: /hadoop-.*/, to: 'hadoop-2' + mapping from: /hadoop-.*/, to: 'hadoop' +} + +task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) { + dependsOn project.configurations.hdfsFixture + executable = new File(project.javaHome, 'bin/java') + env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }" + args 'hdfs.MiniHDFS', + baseDir +} + +integTest { + boolean fixtureSupported = false; + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + // hdfs fixture will not start without hadoop native libraries on windows + String nativePath = System.getenv("HADOOP_HOME") + if (nativePath != null) { + Path path = Paths.get(nativePath); + if (Files.isDirectory(path) && + Files.exists(path.resolve("bin").resolve("winutils.exe")) && + Files.exists(path.resolve("bin").resolve("hadoop.dll")) && + Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { + fixtureSupported = true + } else { + throw new IllegalStateException("HADOOP_HOME: " + path.toString() + " is invalid, does not contain hadoop native libraries in $HADOOP_HOME/bin"); + } + } + } else { + fixtureSupported = true + } + + if (fixtureSupported) { + dependsOn hdfsFixture + } else { + logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") + // just tests that the plugin loads + systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic' + } } compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes' -// main jar includes just the plugin classes -jar { - include "org/elasticsearch/plugin/hadoop/hdfs/*" -} +thirdPartyAudit.excludes = [ + // classes are missing, because we added hadoop jars one by one until tests pass. 
+ 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonWriter', + 'com.jcraft.jsch.ChannelExec', + 'com.jcraft.jsch.JSch', + 'com.jcraft.jsch.Logger', + 'com.jcraft.jsch.Session', + 'com.sun.jersey.api.ParamException', + 'com.sun.jersey.api.core.HttpContext', + 'com.sun.jersey.core.spi.component.ComponentContext', + 'com.sun.jersey.core.spi.component.ComponentScope', + 'com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable', + 'com.sun.jersey.spi.container.ContainerRequest', + 'com.sun.jersey.spi.container.ContainerRequestFilter', + 'com.sun.jersey.spi.container.ContainerResponseFilter', + 'com.sun.jersey.spi.container.ResourceFilter', + 'com.sun.jersey.spi.container.servlet.ServletContainer', + 'com.sun.jersey.spi.inject.Injectable', + 'com.sun.jersey.spi.inject.InjectableProvider', + 'io.netty.bootstrap.Bootstrap', + 'io.netty.bootstrap.ChannelFactory', + 'io.netty.bootstrap.ServerBootstrap', + 'io.netty.buffer.ByteBuf', + 'io.netty.buffer.Unpooled', + 'io.netty.channel.Channel', + 'io.netty.channel.ChannelFuture', + 'io.netty.channel.ChannelFutureListener', + 'io.netty.channel.ChannelHandler', + 'io.netty.channel.ChannelHandlerContext', + 'io.netty.channel.ChannelInboundHandlerAdapter', + 'io.netty.channel.ChannelInitializer', + 'io.netty.channel.ChannelPipeline', + 'io.netty.channel.EventLoopGroup', + 'io.netty.channel.SimpleChannelInboundHandler', + 'io.netty.channel.group.ChannelGroup', + 'io.netty.channel.group.ChannelGroupFuture', + 'io.netty.channel.group.DefaultChannelGroup', + 'io.netty.channel.nio.NioEventLoopGroup', + 'io.netty.channel.socket.SocketChannel', + 'io.netty.channel.socket.nio.NioServerSocketChannel', + 'io.netty.channel.socket.nio.NioSocketChannel', + 'io.netty.handler.codec.http.DefaultFullHttpRequest', + 'io.netty.handler.codec.http.DefaultFullHttpResponse', + 'io.netty.handler.codec.http.DefaultHttpResponse', + 'io.netty.handler.codec.http.HttpContent', + 'io.netty.handler.codec.http.HttpHeaders', + 'io.netty.handler.codec.http.HttpMethod', + 'io.netty.handler.codec.http.HttpRequest', + 'io.netty.handler.codec.http.HttpRequestDecoder', + 'io.netty.handler.codec.http.HttpRequestEncoder', + 'io.netty.handler.codec.http.HttpResponseEncoder', + 'io.netty.handler.codec.http.HttpResponseStatus', + 'io.netty.handler.codec.http.HttpVersion', + 'io.netty.handler.codec.http.QueryStringDecoder', + 'io.netty.handler.codec.string.StringEncoder', + 'io.netty.handler.ssl.SslHandler', + 'io.netty.handler.stream.ChunkedStream', + 'io.netty.handler.stream.ChunkedWriteHandler', + 'io.netty.util.concurrent.GlobalEventExecutor', + 'javax.ws.rs.core.Context', + 'javax.ws.rs.core.MediaType', + 'javax.ws.rs.core.MultivaluedMap', + 'javax.ws.rs.core.Response$ResponseBuilder', + 'javax.ws.rs.core.Response$Status', + 'javax.ws.rs.core.Response', + 'javax.ws.rs.core.StreamingOutput', + 'javax.ws.rs.core.UriBuilder', + 'javax.ws.rs.ext.ExceptionMapper', + 'jdiff.JDiff', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.avro.Schema', + 'org.apache.avro.file.DataFileReader', + 'org.apache.avro.file.FileReader', + 'org.apache.avro.file.SeekableInput', + 'org.apache.avro.generic.GenericDatumReader', + 'org.apache.avro.generic.GenericDatumWriter', + 'org.apache.avro.io.BinaryDecoder', + 'org.apache.avro.io.BinaryEncoder', + 'org.apache.avro.io.DatumReader', + 'org.apache.avro.io.DatumWriter', + 'org.apache.avro.io.DecoderFactory', + 'org.apache.avro.io.EncoderFactory', + 'org.apache.avro.io.JsonEncoder', + 'org.apache.avro.reflect.ReflectData', + 
'org.apache.avro.reflect.ReflectDatumReader', + 'org.apache.avro.reflect.ReflectDatumWriter', + 'org.apache.avro.specific.SpecificDatumReader', + 'org.apache.avro.specific.SpecificDatumWriter', + 'org.apache.avro.specific.SpecificRecord', + 'org.apache.commons.beanutils.BeanUtils', + 'org.apache.commons.beanutils.DynaBean', + 'org.apache.commons.beanutils.DynaClass', + 'org.apache.commons.beanutils.DynaProperty', + 'org.apache.commons.beanutils.PropertyUtils', + 'org.apache.commons.compress.archivers.tar.TarArchiveEntry', + 'org.apache.commons.compress.archivers.tar.TarArchiveInputStream', + 'org.apache.commons.codec.DecoderException', + 'org.apache.commons.codec.binary.Base64', + 'org.apache.commons.codec.binary.Hex', + 'org.apache.commons.codec.digest.DigestUtils', + 'org.apache.commons.daemon.Daemon', + 'org.apache.commons.daemon.DaemonContext', + 'org.apache.commons.digester.AbstractObjectCreationFactory', + 'org.apache.commons.digester.CallMethodRule', + 'org.apache.commons.digester.Digester', + 'org.apache.commons.digester.ObjectCreationFactory', + 'org.apache.commons.digester.substitution.MultiVariableExpander', + 'org.apache.commons.digester.substitution.VariableSubstitutor', + 'org.apache.commons.digester.xmlrules.DigesterLoader', + 'org.apache.commons.httpclient.util.URIUtil', + 'org.apache.commons.jxpath.JXPathContext', + 'org.apache.commons.jxpath.ri.JXPathContextReferenceImpl', + 'org.apache.commons.jxpath.ri.QName', + 'org.apache.commons.jxpath.ri.compiler.NodeNameTest', + 'org.apache.commons.jxpath.ri.compiler.NodeTest', + 'org.apache.commons.jxpath.ri.compiler.NodeTypeTest', + 'org.apache.commons.jxpath.ri.model.NodeIterator', + 'org.apache.commons.jxpath.ri.model.NodePointer', + 'org.apache.commons.jxpath.ri.model.NodePointerFactory', + 'org.apache.commons.math3.util.ArithmeticUtils', + 'org.apache.commons.net.ftp.FTPClient', + 'org.apache.commons.net.ftp.FTPFile', + 'org.apache.commons.net.ftp.FTPReply', + 'org.apache.commons.net.util.SubnetUtils$SubnetInfo', + 'org.apache.commons.net.util.SubnetUtils', + 'org.apache.curator.ensemble.fixed.FixedEnsembleProvider', + 'org.apache.curator.framework.CuratorFramework', + 'org.apache.curator.framework.CuratorFrameworkFactory$Builder', + 'org.apache.curator.framework.CuratorFrameworkFactory', + 'org.apache.curator.framework.api.ACLBackgroundPathAndBytesable', + 'org.apache.curator.framework.api.ACLProvider', + 'org.apache.curator.framework.api.BackgroundPathAndBytesable', + 'org.apache.curator.framework.api.ChildrenDeletable', + 'org.apache.curator.framework.api.CreateBuilder', + 'org.apache.curator.framework.api.DeleteBuilder', + 'org.apache.curator.framework.api.ExistsBuilder', + 'org.apache.curator.framework.api.GetChildrenBuilder', + 'org.apache.curator.framework.api.GetDataBuilder', + 'org.apache.curator.framework.api.ProtectACLCreateModePathAndBytesable', + 'org.apache.curator.framework.api.SetDataBuilder', + 'org.apache.curator.framework.api.WatchPathable', + 'org.apache.curator.framework.imps.DefaultACLProvider', + 'org.apache.curator.framework.listen.ListenerContainer', + 'org.apache.curator.framework.recipes.cache.ChildData', + 'org.apache.curator.framework.recipes.cache.PathChildrenCache$StartMode', + 'org.apache.curator.framework.recipes.cache.PathChildrenCache', + 'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent$Type', + 'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent', + 'org.apache.curator.framework.recipes.cache.PathChildrenCacheListener', + 
'org.apache.curator.framework.recipes.locks.Reaper$Mode', + 'org.apache.curator.framework.recipes.locks.Reaper', + 'org.apache.curator.framework.recipes.shared.SharedCount', + 'org.apache.curator.framework.recipes.shared.VersionedValue', + 'org.apache.curator.retry.ExponentialBackoffRetry', + 'org.apache.curator.retry.RetryNTimes', + 'org.apache.curator.utils.CloseableScheduledExecutorService', + 'org.apache.curator.utils.CloseableUtils', + 'org.apache.curator.utils.EnsurePath', + 'org.apache.curator.utils.PathUtils', + 'org.apache.curator.utils.ThreadUtils', + 'org.apache.curator.utils.ZKPaths', + 'org.apache.directory.server.kerberos.shared.keytab.Keytab', + 'org.apache.directory.server.kerberos.shared.keytab.KeytabEntry', + 'org.apache.http.NameValuePair', + 'org.apache.http.client.utils.URIBuilder', + 'org.apache.http.client.utils.URLEncodedUtils', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + 'org.apache.tools.ant.BuildException', + 'org.apache.tools.ant.DirectoryScanner', + 'org.apache.tools.ant.Task', + 'org.apache.tools.ant.taskdefs.Execute', + 'org.apache.tools.ant.types.FileSet', + 'org.apache.xml.serialize.OutputFormat', + 'org.apache.xml.serialize.XMLSerializer', + 'org.apache.zookeeper.AsyncCallback$StatCallback', + 'org.apache.zookeeper.AsyncCallback$StringCallback', + 'org.apache.zookeeper.CreateMode', + 'org.apache.zookeeper.KeeperException$Code', + 'org.apache.zookeeper.KeeperException', + 'org.apache.zookeeper.WatchedEvent', + 'org.apache.zookeeper.Watcher$Event$EventType', + 'org.apache.zookeeper.Watcher$Event$KeeperState', + 'org.apache.zookeeper.Watcher', + 'org.apache.zookeeper.ZKUtil', + 'org.apache.zookeeper.ZooDefs$Ids', + 'org.apache.zookeeper.ZooKeeper', + 'org.apache.zookeeper.data.ACL', + 'org.apache.zookeeper.data.Id', + 'org.apache.zookeeper.data.Stat', + 'org.codehaus.jackson.JsonEncoding', + 'org.codehaus.jackson.JsonFactory', + 'org.codehaus.jackson.JsonGenerator', + 'org.codehaus.jackson.JsonGenerator$Feature', + 'org.codehaus.jackson.JsonNode', + 'org.codehaus.jackson.map.MappingJsonFactory', + 'org.codehaus.jackson.map.ObjectMapper', + 'org.codehaus.jackson.map.ObjectReader', + 'org.codehaus.jackson.map.ObjectWriter', + 'org.codehaus.jackson.node.ContainerNode', + 'org.codehaus.jackson.type.TypeReference', + 'org.codehaus.jackson.util.MinimalPrettyPrinter', + 'org.fusesource.leveldbjni.JniDBFactory', + 'org.iq80.leveldb.DB', + 'org.iq80.leveldb.Options', + 'org.iq80.leveldb.WriteBatch', + 'org.mortbay.jetty.Connector', + 'org.mortbay.jetty.Handler', + 'org.mortbay.jetty.InclusiveByteRange', + 'org.mortbay.jetty.MimeTypes', + 'org.mortbay.jetty.NCSARequestLog', + 'org.mortbay.jetty.RequestLog', + 'org.mortbay.jetty.Server', + 'org.mortbay.jetty.handler.ContextHandler$SContext', + 'org.mortbay.jetty.handler.ContextHandler', + 'org.mortbay.jetty.handler.ContextHandlerCollection', + 'org.mortbay.jetty.handler.HandlerCollection', + 'org.mortbay.jetty.handler.RequestLogHandler', + 'org.mortbay.jetty.nio.SelectChannelConnector', + 'org.mortbay.jetty.security.SslSocketConnector', + 'org.mortbay.jetty.servlet.AbstractSessionManager', + 'org.mortbay.jetty.servlet.Context', + 'org.mortbay.jetty.servlet.DefaultServlet', + 'org.mortbay.jetty.servlet.FilterHolder', + 'org.mortbay.jetty.servlet.FilterMapping', + 'org.mortbay.jetty.servlet.ServletHandler', + 'org.mortbay.jetty.servlet.ServletHolder', + 'org.mortbay.jetty.servlet.SessionHandler', + 'org.mortbay.jetty.webapp.WebAppContext', + 'org.mortbay.log.Log', + 'org.mortbay.thread.QueuedThreadPool', + 
'org.mortbay.util.MultiException', + 'org.mortbay.util.ajax.JSON$Convertible', + 'org.mortbay.util.ajax.JSON$Output', + 'org.mortbay.util.ajax.JSON', + 'org.znerd.xmlenc.XMLOutputter', -// hadoop jar (which actually depend on Hadoop) -task hadoopLinkedJar(type: Jar, dependsOn:jar) { - appendix "internal" - from sourceSets.main.output.classesDir - // exclude plugin - exclude "org/elasticsearch/plugin/hadoop/hdfs/*" -} + // internal java api: sun.net.dns.ResolverConfiguration + // internal java api: sun.net.util.IPAddressUtil + 'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver', + // internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer', + 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1', + 'org.apache.hadoop.io.nativeio.NativeIO', + 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm', + 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot', -bundlePlugin.dependsOn hadoopLinkedJar - -// configure 'bundle' as being w/o Hadoop deps -bundlePlugin { - into ("internal-libs") { - from hadoopLinkedJar.archivePath - } - - into ("hadoop-libs") { - from configurations.hadoop2.allArtifacts.files - from configurations.hadoop2 - } -} - - -task distZipHadoop1(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask -> - from (zipTree(bundlePlugin.archivePath)) { - include "*" - include "internal-libs/**" - } - - description = "Builds archive (with Hadoop1 dependencies) suitable for download page." - classifier = "hadoop1" - - into ("hadoop-libs") { - from configurations.hadoop1.allArtifacts.files - from configurations.hadoop1 - } -} - -task distZipHadoop2(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask -> - from (zipTree(bundlePlugin.archivePath)) { - include "*" - include "internal-libs/**" - } - - description = "Builds archive (with Hadoop2/YARN dependencies) suitable for download page." - classifier = "hadoop2" - - into ("hadoop-libs") { - from configurations.hadoop2.allArtifacts.files - from configurations.hadoop2 - } -} - -task distZipNoHadoop(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask -> - from (zipTree(bundlePlugin.archivePath)) { - exclude "hadoop-libs/**" - } - - from sourceSets.main.output.resourcesDir - - description = "Builds archive (without any Hadoop dependencies) suitable for download page." 
- classifier = "lite" -} - - -artifacts { - archives bundlePlugin - 'default' bundlePlugin - archives distZipHadoop1 - archives distZipHadoop2 - archives distZipNoHadoop -} - -integTest { - cluster { - plugin(pluginProperties.extension.name, zipTree(distZipHadoop2.archivePath)) - } -} \ No newline at end of file + // internal java api: sun.nio.ch.DirectBuffer + // internal java api: sun.misc.Cleaner + 'org.apache.hadoop.io.nativeio.NativeIO$POSIX', + 'org.apache.hadoop.crypto.CryptoStreamUtils', + + // internal java api: sun.misc.SignalHandler + 'org.apache.hadoop.util.SignalLogger$Handler', +] diff --git a/plugins/repository-hdfs/licenses/commons-collections-3.2.2.jar.sha1 b/plugins/repository-hdfs/licenses/commons-collections-3.2.2.jar.sha1 new file mode 100644 index 00000000000..e9eeffde5da --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-collections-3.2.2.jar.sha1 @@ -0,0 +1 @@ +8ad72fe39fa8c91eaaf12aadb21e0c3661fe26d5 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/stax-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-collections-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/stax-LICENSE.txt rename to plugins/repository-hdfs/licenses/commons-collections-LICENSE.txt diff --git a/plugins/repository-hdfs/licenses/commons-collections-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-collections-NOTICE.txt new file mode 100644 index 00000000000..7f8a95f8bbf --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-collections-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons Collections +Copyright 2001-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). diff --git a/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1 b/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1 new file mode 100644 index 00000000000..44ad1f6d8da --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1 @@ -0,0 +1 @@ +32cadde23955d7681b0d94a2715846d20b425235 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/stax-api-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-configuration-LICENSE.txt similarity index 100% rename from plugins/mapper-attachments/licenses/stax-api-LICENSE.txt rename to plugins/repository-hdfs/licenses/commons-configuration-LICENSE.txt diff --git a/plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt new file mode 100644 index 00000000000..3d6dfaec547 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons Configuration +Copyright 2001-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
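The integTest block in the repository-hdfs build script above decides whether the MiniHDFS fixture can run: on non-Windows platforms it always can, while on Windows it additionally needs Hadoop's native binaries under HADOOP_HOME\bin — failing the build if HADOOP_HOME is set but incomplete, and otherwise falling back to the basic hdfs_repository/10_basic REST suite. A rough restatement of that check in plain Java, for illustration only (the Gradle script is authoritative):

---------------------------------------------------------------------------
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Locale;

// Mirrors the fixture-support decision from the integTest block; illustrative only.
public class HdfsFixtureSupportSketch {

    static boolean fixtureSupported() {
        String os = System.getProperty("os.name").toLowerCase(Locale.ROOT);
        if (!os.contains("windows")) {
            return true; // the native libraries are only required on Windows
        }
        String hadoopHome = System.getenv("HADOOP_HOME");
        if (hadoopHome == null) {
            return false; // build falls back to the "plugin loads" REST test
        }
        Path path = Paths.get(hadoopHome);
        if (Files.isDirectory(path)
                && Files.exists(path.resolve("bin").resolve("winutils.exe"))
                && Files.exists(path.resolve("bin").resolve("hadoop.dll"))
                && Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
            return true;
        }
        // like the Gradle script, fail fast on a half-configured HADOOP_HOME
        throw new IllegalStateException("HADOOP_HOME: " + path
                + " is invalid, does not contain hadoop native libraries in HADOOP_HOME/bin");
    }

    public static void main(String[] args) {
        System.out.println("HDFS fixture supported: " + fixtureSupported());
    }
}
---------------------------------------------------------------------------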
diff --git a/plugins/repository-hdfs/licenses/commons-io-2.4.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.4.jar.sha1 new file mode 100644 index 00000000000..2f5b30d0edb --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-io-2.4.jar.sha1 @@ -0,0 +1 @@ +b1b6ea3b7e4aa4f492509a4952029cd8e48019ad \ No newline at end of file diff --git a/modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt b/plugins/repository-hdfs/licenses/commons-io-LICENSE.txt similarity index 100% rename from modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt rename to plugins/repository-hdfs/licenses/commons-io-LICENSE.txt index 57bc88a15a0..d6456956733 100644 --- a/modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt +++ b/plugins/repository-hdfs/licenses/commons-io-LICENSE.txt @@ -1,3 +1,4 @@ + Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ @@ -199,4 +200,3 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. - diff --git a/plugins/repository-hdfs/licenses/commons-io-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-io-NOTICE.txt new file mode 100644 index 00000000000..7b27516f07f --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-io-NOTICE.txt @@ -0,0 +1,6 @@ +Apache Commons IO +Copyright 2002-2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + diff --git a/plugins/repository-hdfs/licenses/commons-lang-2.6.jar.sha1 b/plugins/repository-hdfs/licenses/commons-lang-2.6.jar.sha1 new file mode 100644 index 00000000000..4ee9249d2b7 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-lang-2.6.jar.sha1 @@ -0,0 +1 @@ +0ce1edb914c94ebc388f086c6827e8bdeec71ac2 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-lang-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-lang-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-lang-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-hdfs/licenses/commons-lang-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-lang-NOTICE.txt new file mode 100644 index 00000000000..8dfa22157ab --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-lang-NOTICE.txt @@ -0,0 +1,9 @@ +Apache Commons Lang +Copyright 2001-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
+ +This product includes software from the Spring Framework, +under the Apache License 2.0 (see: StringUtils.containsWhitespace()) + diff --git a/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 new file mode 100644 index 00000000000..5b8f029e582 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 @@ -0,0 +1 @@ +f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-logging-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-logging-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-logging-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-hdfs/licenses/commons-logging-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-logging-NOTICE.txt new file mode 100644 index 00000000000..556bd03951d --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-logging-NOTICE.txt @@ -0,0 +1,6 @@ +Apache Commons Logging +Copyright 2003-2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + diff --git a/plugins/repository-hdfs/licenses/guava-16.0.1.jar.sha1 b/plugins/repository-hdfs/licenses/guava-16.0.1.jar.sha1 new file mode 100644 index 00000000000..68f2b233a00 --- /dev/null +++ b/plugins/repository-hdfs/licenses/guava-16.0.1.jar.sha1 @@ -0,0 +1 @@ +5fa98cd1a63c99a44dd8d3b77e4762b066a5d0c5 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/guava-LICENSE.txt b/plugins/repository-hdfs/licenses/guava-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-hdfs/licenses/guava-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-hdfs/licenses/guava-NOTICE.txt b/plugins/repository-hdfs/licenses/guava-NOTICE.txt new file mode 100644 index 00000000000..139597f9cb0 --- /dev/null +++ b/plugins/repository-hdfs/licenses/guava-NOTICE.txt @@ -0,0 +1,2 @@ + + diff --git a/plugins/repository-hdfs/licenses/hadoop-LICENSE.txt b/plugins/repository-hdfs/licenses/hadoop-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-hdfs/licenses/hadoop-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-hdfs/licenses/hadoop-NOTICE.txt b/plugins/repository-hdfs/licenses/hadoop-NOTICE.txt new file mode 100644 index 00000000000..62fc5816c99 --- /dev/null +++ b/plugins/repository-hdfs/licenses/hadoop-NOTICE.txt @@ -0,0 +1,2 @@ +This product includes software developed by The Apache Software +Foundation (http://www.apache.org/). diff --git a/plugins/repository-hdfs/licenses/hadoop-annotations-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-annotations-2.7.1.jar.sha1 new file mode 100644 index 00000000000..660467a4c6e --- /dev/null +++ b/plugins/repository-hdfs/licenses/hadoop-annotations-2.7.1.jar.sha1 @@ -0,0 +1 @@ +2a77fe74ee056bf45598cf7e20cd624e8388e627 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/hadoop-auth-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-auth-2.7.1.jar.sha1 new file mode 100644 index 00000000000..0161301ead2 --- /dev/null +++ b/plugins/repository-hdfs/licenses/hadoop-auth-2.7.1.jar.sha1 @@ -0,0 +1 @@ +2515f339f97f1d7ba850485e06e395a58586bc2e \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/hadoop-client-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-client-2.7.1.jar.sha1 new file mode 100644 index 00000000000..4c6dca8af49 --- /dev/null +++ b/plugins/repository-hdfs/licenses/hadoop-client-2.7.1.jar.sha1 @@ -0,0 +1 @@ +dbc2faacd210e6a1e3eb7def6e42065c7457d960 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/hadoop-common-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-common-2.7.1.jar.sha1 new file mode 100644 index 00000000000..64ff368db60 --- /dev/null +++ b/plugins/repository-hdfs/licenses/hadoop-common-2.7.1.jar.sha1 @@ -0,0 +1 @@ +50580f5ebab60b1b318ad157f668d8e40a1cc0da \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/hadoop-hdfs-2.7.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-hdfs-2.7.1.jar.sha1 new file mode 100644 index 00000000000..2d4954befaa --- /dev/null +++ b/plugins/repository-hdfs/licenses/hadoop-hdfs-2.7.1.jar.sha1 @@ -0,0 +1 @@ +11681de93a4cd76c841e352b7094f839b072a21f \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/htrace-core-3.1.0-incubating.jar.sha1 b/plugins/repository-hdfs/licenses/htrace-core-3.1.0-incubating.jar.sha1 new file mode 100644 index 00000000000..c742d8397cf --- /dev/null +++ b/plugins/repository-hdfs/licenses/htrace-core-3.1.0-incubating.jar.sha1 @@ -0,0 +1 @@ +f73606e7c9ede5802335c290bf47490ad6d51df3 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/htrace-core-LICENSE.txt b/plugins/repository-hdfs/licenses/htrace-core-LICENSE.txt new file mode 100644 index 00000000000..0befae88d4f --- /dev/null +++ b/plugins/repository-hdfs/licenses/htrace-core-LICENSE.txt @@ -0,0 +1,242 @@ +Apache HTrace (incubating) is Apache 2.0 Licensed. See below for licensing +of dependencies that are NOT Apache Licensed. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +The HTrace Owl logo is from http://www.clker.com/clipart-13653.html. It is +public domain. 
+ +D3, a javascript library for manipulating data, used by htrace-hbase +is Copyright 2010-2014, Michael Bostock and BSD licensed: +https://github.com/mbostock/d3/blob/master/LICENSE + +Bootstrap, an html, css, and javascript framework, is +Copyright (c) 2011-2015 Twitter, Inc and MIT licensed: +https://github.com/twbs/bootstrap/blob/master/LICENSE + +underscore, a javascript library of functional programming helpers, is +(c) 2009-2014 Jeremy Ashkenas, DocumentCloud and Investigative Reporters +& Editors and an MIT license: +https://github.com/jashkenas/underscore/blob/master/LICENSE + +jquery, a javascript library, is Copyright jQuery Foundation and other +contributors, https://jquery.org/. The software consists of +voluntary contributions made by many individuals. For exact +contribution history, see the revision history +available at https://github.com/jquery/jquery +It is MIT licensed: +https://github.com/jquery/jquery/blob/master/LICENSE.txt + +backbone, is a javascript library, that is Copyright (c) 2010-2014 +Jeremy Ashkenas, DocumentCloud. It is MIT licensed: +https://github.com/jashkenas/backbone/blob/master/LICENSE + +moment.js is a front end time conversion project. +It is (c) 2011-2014 Tim Wood, Iskren Chernev, Moment.js contributors +and shared under the MIT license: +https://github.com/moment/moment/blob/develop/LICENSE + +CMP is an implementation of the MessagePack serialization format in +C. It is licensed under the MIT license: +https://github.com/camgunz/cmp/blob/master/LICENSE +See ./htrace-c/src/util/cmp.c and ./htrace-c/src/util/cmp.h. diff --git a/plugins/repository-hdfs/licenses/htrace-core-NOTICE.txt b/plugins/repository-hdfs/licenses/htrace-core-NOTICE.txt new file mode 100644 index 00000000000..845b6965e04 --- /dev/null +++ b/plugins/repository-hdfs/licenses/htrace-core-NOTICE.txt @@ -0,0 +1,13 @@ +Apache HTrace +Copyright 2015 The Apache Software Foundation + +This product includes software developed at The Apache Software +Foundation (http://www.apache.org/). + +In addition, this product includes software dependencies. See +the accompanying LICENSE.txt for a listing of dependencies +that are NOT Apache licensed (with pointers to their licensing) + +Apache HTrace includes an Apache Thrift connector to Zipkin. Zipkin +is a distributed tracing system that is Apache 2.0 Licensed. +Copyright 2012 Twitter, Inc. diff --git a/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1 b/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1 new file mode 100644 index 00000000000..71f918819e2 --- /dev/null +++ b/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1 @@ -0,0 +1 @@ +a10732c76bfacdbd633a7eb0f7968b1059a65dfa \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/protobuf-java-LICENSE.txt b/plugins/repository-hdfs/licenses/protobuf-java-LICENSE.txt new file mode 100644 index 00000000000..49e7019ac5a --- /dev/null +++ b/plugins/repository-hdfs/licenses/protobuf-java-LICENSE.txt @@ -0,0 +1,10 @@ +Copyright (c) <year>, <copyright holder> +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/repository-hdfs/licenses/protobuf-java-NOTICE.txt b/plugins/repository-hdfs/licenses/protobuf-java-NOTICE.txt new file mode 100644 index 00000000000..139597f9cb0 --- /dev/null +++ b/plugins/repository-hdfs/licenses/protobuf-java-NOTICE.txt @@ -0,0 +1,2 @@ + + diff --git a/plugins/repository-hdfs/licenses/servlet-api-2.5.jar.sha1 b/plugins/repository-hdfs/licenses/servlet-api-2.5.jar.sha1 new file mode 100644 index 00000000000..08564097ebe --- /dev/null +++ b/plugins/repository-hdfs/licenses/servlet-api-2.5.jar.sha1 @@ -0,0 +1 @@ +5959582d97d8b61f4d154ca9e495aafd16726e34 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/servlet-api-LICENSE.txt b/plugins/repository-hdfs/licenses/servlet-api-LICENSE.txt new file mode 100644 index 00000000000..2b93f7d2e73 --- /dev/null +++ b/plugins/repository-hdfs/licenses/servlet-api-LICENSE.txt @@ -0,0 +1,93 @@ + COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 1. + +Definitions. + +1.1. Contributor means each individual or entity that creates or contributes to the creation of Modifications. + +1.2. Contributor Version means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor. + +1.3. Covered Software means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof. + +1.4. Executable means the Covered Software in any form other than Source Code. + +1.5. Initial Developer means the individual or entity that first makes Original Software available under this License. + +1.6. Larger Work means a work which combines Covered Software or portions thereof with code not governed by the terms of this License. + +1.7. License means this document. + +1.8. Licensable means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein. + +1.9. Modifications means the Source Code and Executable form of any of the following: A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; B. Any new file that contains any part of the Original Software or previous Modification; or C. Any new file that is contributed or otherwise made available under the terms of this License. + +1.10. Original Software means the Source Code and Executable form of computer software code that is originally released under this License. + +1.11. 
Patent Claims means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor. + +1.12. Source Code means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code. + +1.13. You (or Your) means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, You includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, control means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. + +2. License Grants. + + 2.1. The Initial Developer Grant. Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and + +(b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof); + + (c) The licenses granted in Sections 2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License; + + (d) Notwithstanding Section 2.1(b) above, no patent license is granted: (1) for code that You delete from the Original Software, or (2) for infringements caused by: (i) the modification of the Original Software, or (ii) the combination of the Original Software with other software or devices. + +2.2. Contributor Grant. Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and + +(b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1) Modifications made by that Contributor (or portions thereof); and (2) the combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination). + +(c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party. 
+ +(d) Notwithstanding Section 2.2(b) above, no patent license is granted: (1) for any code that Contributor has deleted from the Contributor Version; (2) for infringements caused by: (i) third party modifications of Contributor Version, or (ii) the combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3) under Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor. + +3. Distribution Obligations. + +3.1. Availability of Source Code. Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange. + +3.2. Modifications. The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License. + +3.3. Required Notices. You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer. + +3.4. Application of Additional Terms. You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer. + +3.5. Distribution of Executable Versions. You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipients rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. 
You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer. + +3.6. Larger Works. You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software. + +4. Versions of the License. + +4.1. New Versions. Sun Microsystems, Inc. is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License. + +4.2. Effect of New Versions. You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward. + +4.3. Modified Versions. When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a) rename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b) otherwise make it clear that the license contains terms which differ from this License. + +5. DISCLAIMER OF WARRANTY. COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +6. TERMINATION. + +6.1. This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive. + +6.2. 
If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as Participant) alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant. + +6.3. In the event of termination under Sections 6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination. + +7. LIMITATION OF LIABILITY. UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTYS NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. + +8. U.S. GOVERNMENT END USERS. The Covered Software is a commercial item, as that term is defined in 48 C.F.R. 2.101 (Oct. 1995), consisting of commercial computer software (as that term is defined at 48 C.F.R. 252.227-7014(a)(1)) and commercial computer software documentation as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S. Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License. + +9. MISCELLANEOUS. This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdictions conflict-of-law provisions. 
Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software. + +10. RESPONSIBILITY FOR CLAIMS. As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability. + +NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. diff --git a/plugins/repository-hdfs/licenses/servlet-api-NOTICE.txt b/plugins/repository-hdfs/licenses/servlet-api-NOTICE.txt new file mode 100644 index 00000000000..139597f9cb0 --- /dev/null +++ b/plugins/repository-hdfs/licenses/servlet-api-NOTICE.txt @@ -0,0 +1,2 @@ + + diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java deleted file mode 100644 index 9b65f7bec2f..00000000000 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.plugin.hadoop.hdfs; - -import java.io.IOException; -import java.lang.reflect.Method; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.net.URLClassLoader; -import java.nio.file.Path; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; - -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesModule; -import org.elasticsearch.repositories.Repository; - -// -// Note this plugin is somewhat special as Hadoop itself loads a number of libraries and thus requires a number of permissions to run even in client mode. -// This poses two problems: -// - Hadoop itself comes with tons of jars, many providing the same classes across packages. In particular Hadoop 2 provides package annotations in the same -// package across jars which trips JarHell. Thus, to allow Hadoop jars to load, the plugin uses a dedicated CL which picks them up from the hadoop-libs folder. -// - The issue though with using a different CL is that it picks up the jars from a different location / codeBase and thus it does not fall under the plugin -// permissions. In other words, the plugin permissions don't apply to the hadoop libraries. -// There are different approaches here: -// - implement a custom classloader that loads the jars but 'lies' about the codesource. It is doable but since URLClassLoader is locked down, one would -// have to implement the whole jar opening and loading from it. Not impossible but still fairly low-level. -// Furthermore, even if the code has the proper credentials, it needs to use the proper Privileged blocks to use its full permissions which does not -// happen in the Hadoop code base. -// - use a different Policy. Works but the Policy is JVM wide and thus the code needs to be quite efficient - quite a bit of impact to cover just some plugin -// libraries -// - use a DomainCombiner. This doesn't change the semantics (it's clear where the code is loaded from, etc..) however it gives us a scoped, fine-grained -// callback on handling the permission intersection for secured calls. Note that DC works only in the current PAC call - the moment another PA is used, -// the domain combiner is going to be ignored (unless the caller specifically uses it). Due to its scoped impact and official Java support, this approach -// was used. - -// ClassLoading info -// - package plugin.hadoop.hdfs is part of the plugin -// - all the other packages are assumed to be in the nested Hadoop CL.
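The DomainCombiner option chosen above is compact enough to sketch in isolation. The following is a minimal, self-contained illustration of the mechanism the comment describes, not the plugin's actual implementation (that lives in Utils.hadoopACC() in the next file); the TRUSTED_PREFIX value is a made-up placeholder:

import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.DomainCombiner;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;

public class DomainCombinerSketch {
    // Hypothetical location of the trusted jars; the real plugin derives this at runtime.
    static final String TRUSTED_PREFIX = "file:/path/to/plugins/repository-hdfs/hadoop-libs/";

    public static void main(String[] args) {
        DomainCombiner combiner = new DomainCombiner() {
            @Override
            public ProtectionDomain[] combine(ProtectionDomain[] current, ProtectionDomain[] assigned) {
                // If any domain on the stack comes from the trusted location, keep the
                // assigned domains as-is instead of intersecting them with the caller's permissions.
                if (assigned != null) {
                    for (ProtectionDomain pd : assigned) {
                        if (pd.getCodeSource() != null
                                && pd.getCodeSource().getLocation().toString().startsWith(TRUSTED_PREFIX)) {
                            return assigned;
                        }
                    }
                }
                return current;
            }
        };
        // The combiner is consulted only within this doPrivileged call - the
        // "scoped, fine-grained callback" property the comment relies on.
        AccessControlContext acc = new AccessControlContext(AccessController.getContext(), combiner);
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                // calls into the Hadoop libraries would go here
                return null;
            }
        }, acc);
    }
}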
- -// Code -public class HdfsPlugin extends Plugin { - - @Override - public String name() { - return "repository-hdfs"; - } - - @Override - public String description() { - return "HDFS Repository Plugin"; - } - - @SuppressWarnings("unchecked") - public void onModule(RepositoriesModule repositoriesModule) { - String baseLib = Utils.detectLibFolder(); - List<URL> cp = getHadoopClassLoaderPath(baseLib); - - ClassLoader hadoopCL = URLClassLoader.newInstance(cp.toArray(new URL[cp.size()]), getClass().getClassLoader()); - - Class<? extends Repository> repository = null; - try { - repository = (Class<? extends Repository>) hadoopCL.loadClass("org.elasticsearch.repositories.hdfs.HdfsRepository"); - } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("Cannot load plugin class; is the plugin class setup correctly?", cnfe); - } - - repositoriesModule.registerRepository("hdfs", repository, BlobStoreIndexShardRepository.class); - Loggers.getLogger(HdfsPlugin.class).info("Loaded Hadoop [{}] libraries from {}", getHadoopVersion(hadoopCL), baseLib); - } - - protected List<URL> getHadoopClassLoaderPath(String baseLib) { - List<URL> cp = new ArrayList<>(); - // add plugin internal jar - discoverJars(createURI(baseLib, "internal-libs"), cp, false); - // add Hadoop jars - discoverJars(createURI(baseLib, "hadoop-libs"), cp, true); - return cp; - } - - private String getHadoopVersion(ClassLoader hadoopCL) { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - // unprivileged code such as scripts do not have SpecialPermission - sm.checkPermission(new SpecialPermission()); - } - - return AccessController.doPrivileged(new PrivilegedAction<String>() { - @Override - public String run() { - // Hadoop 2 relies on TCCL to determine the version - ClassLoader tccl = Thread.currentThread().getContextClassLoader(); - try { - Thread.currentThread().setContextClassLoader(hadoopCL); - return doGetHadoopVersion(hadoopCL); - } finally { - Thread.currentThread().setContextClassLoader(tccl); - } - } - }, Utils.hadoopACC()); - } - - private String doGetHadoopVersion(ClassLoader hadoopCL) { - String version = "Unknown"; - - Class<?> clz = null; - try { - clz = hadoopCL.loadClass("org.apache.hadoop.util.VersionInfo"); - } catch (ClassNotFoundException cnfe) { - // unknown - } - if (clz != null) { - try { - Method method = clz.getMethod("getVersion"); - version = method.invoke(null).toString(); - } catch (Exception ex) { - // class has changed, ignore - } - } - - return version; - } - - private URI createURI(String base, String suffix) { - String location = base + suffix; - try { - return new URI(location); - } catch (URISyntaxException ex) { - throw new IllegalStateException(String.format(Locale.ROOT, "Cannot detect plugin folder; [%s] seems invalid", location), ex); - } - } - - @SuppressForbidden(reason = "discover nested jar") - private void discoverJars(URI libPath, List<URL> cp, boolean optional) { - try { - Path[] jars = FileSystemUtils.files(PathUtils.get(libPath), "*.jar"); - - for (Path path : jars) { - cp.add(path.toUri().toURL()); - } - } catch (IOException ex) { - if (!optional) { - throw new IllegalStateException("Cannot compute plugin classpath", ex); - } - } - } -} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java deleted file mode 100644 index 101025d029e..00000000000 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/Utils.java +++ /dev/null @@ -1,84 +0,0
@@ -package org.elasticsearch.plugin.hadoop.hdfs; - -import java.net.URL; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.DomainCombiner; -import java.security.PrivilegedAction; -import java.security.ProtectionDomain; - -import org.elasticsearch.SpecialPermission; - -public abstract class Utils { - - protected static AccessControlContext hadoopACC() { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - // unprivileged code such as scripts do not have SpecialPermission - sm.checkPermission(new SpecialPermission()); - } - - return AccessController.doPrivileged(new PrivilegedAction<AccessControlContext>() { - @Override - public AccessControlContext run() { - return new AccessControlContext(AccessController.getContext(), new HadoopDomainCombiner()); - } - }); - } - - private static class HadoopDomainCombiner implements DomainCombiner { - - private static String BASE_LIB = detectLibFolder(); - - @Override - public ProtectionDomain[] combine(ProtectionDomain[] currentDomains, ProtectionDomain[] assignedDomains) { - for (ProtectionDomain pd : assignedDomains) { - if (pd.getCodeSource().getLocation().toString().startsWith(BASE_LIB)) { - return assignedDomains; - } - } - - return currentDomains; - } - } - - static String detectLibFolder() { - ClassLoader cl = Utils.class.getClassLoader(); - - // we could get the URL from the URLClassloader directly - // but that can create issues when running the tests from the IDE - // we could detect that by loading resources but that as well relies on - // the JAR URL - String classToLookFor = HdfsPlugin.class.getName().replace(".", "/").concat(".class"); - URL classURL = cl.getResource(classToLookFor); - if (classURL == null) { - throw new IllegalStateException("Cannot detect itself; something is wrong with this ClassLoader " + cl); - } - - String base = classURL.toString(); - - // extract root - // typically a JAR URL - int index = base.indexOf("!/"); - if (index > 0) { - base = base.substring(0, index); - // remove its prefix (jar:) - base = base.substring(4); - // remove the trailing jar - index = base.lastIndexOf("/"); - base = base.substring(0, index + 1); - } - // not a jar - something else, do a best effort here - else { - // remove the class searched - base = base.substring(0, base.length() - classToLookFor.length()); - } - - // append / - if (!base.endsWith("/")) { - base = base.concat("/"); - } - - return base; - } -} \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index f71ca7020a8..135e2f77810 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -18,8 +18,11 @@ */ package org.elasticsearch.repositories.hdfs; +import org.apache.hadoop.fs.CreateFlag; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Options.CreateOpts; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.elasticsearch.common.Nullable; @@ -27,35 +30,35 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import
org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; +import org.elasticsearch.repositories.hdfs.HdfsBlobStore.Operation; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; import java.util.Collections; +import java.util.EnumSet; import java.util.LinkedHashMap; -import java.util.Locale; import java.util.Map; -public class HdfsBlobContainer extends AbstractBlobContainer { +final class HdfsBlobContainer extends AbstractBlobContainer { + private final HdfsBlobStore store; + private final Path path; + private final int bufferSize; - protected final HdfsBlobStore blobStore; - protected final Path path; - - public HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore blobStore, Path path) { + HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore store, Path path, int bufferSize) { super(blobPath); - this.blobStore = blobStore; + this.store = store; this.path = path; + this.bufferSize = bufferSize; } @Override public boolean blobExists(String blobName) { try { - return SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Boolean>() { + return store.execute(new Operation<Boolean>() { @Override - public Boolean doInHdfs(FileSystem fs) throws IOException { - return fs.exists(new Path(path, blobName)); + public Boolean run(FileContext fileContext) throws IOException { + return fileContext.util().exists(new Path(path, blobName)); } }); } catch (Exception e) { @@ -65,46 +68,62 @@ public class HdfsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { - SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Boolean>() { - @Override - public Boolean doInHdfs(FileSystem fs) throws IOException { - return fs.delete(new Path(path, blobName), true); - } - }); + try { + store.execute(new Operation<Boolean>() { + @Override + public Boolean run(FileContext fileContext) throws IOException { + return fileContext.delete(new Path(path, blobName), true); + } + }); + } catch (FileNotFoundException ok) { + // behaves like Files.deleteIfExists + } } @Override public void move(String sourceBlobName, String targetBlobName) throws IOException { - boolean rename = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Boolean>() { + store.execute(new Operation<Void>() { @Override - public Boolean doInHdfs(FileSystem fs) throws IOException { - return fs.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); + public Void run(FileContext fileContext) throws IOException { + fileContext.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName)); + return null; + } }); - - if (!rename) { - throw new IOException(String.format(Locale.ROOT, "can not move blob from [%s] to [%s]", sourceBlobName, targetBlobName)); - } } @Override public InputStream readBlob(String blobName) throws IOException { // FSDataInputStream does buffering internally - return SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<InputStream>() { + return store.execute(new Operation<InputStream>() { @Override - public InputStream doInHdfs(FileSystem fs) throws IOException { - return fs.open(new Path(path, blobName), blobStore.bufferSizeInBytes()); + public InputStream run(FileContext fileContext) throws IOException { + return fileContext.open(new Path(path, blobName), bufferSize); } }); } @Override public void writeBlob(String blobName, InputStream
inputStream, long blobSize) throws IOException { - SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Void>() { + store.execute(new Operation<Void>() { @Override - public Void doInHdfs(FileSystem fs) throws IOException { - try (OutputStream stream = createOutput(blobName)) { - Streams.copy(inputStream, stream); + public Void run(FileContext fileContext) throws IOException { + Path blob = new Path(path, blobName); + // we pass CREATE, which means it fails if a blob already exists. + // NOTE: this behavior differs from FSBlobContainer, which passes TRUNCATE_EXISTING + // that should be fixed there, no need to bring truncation into this, give the user an error. + EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK); + CreateOpts[] opts = { CreateOpts.bufferSize(bufferSize) }; + try (FSDataOutputStream stream = fileContext.create(blob, flags, opts)) { + int bytesRead; + byte[] buffer = new byte[bufferSize]; + while ((bytesRead = inputStream.read(buffer)) != -1) { + stream.write(buffer, 0, bytesRead); + // For safety we also hsync each write as well, because of its docs: + // SYNC_BLOCK - to force closed blocks to the disk device + // "In addition Syncable.hsync() should be called after each write, + // if true synchronous behavior is required" + stream.hsync(); + } } return null; } @@ -112,40 +131,18 @@ } @Override - public void writeBlob(String blobName, BytesReference bytes) throws IOException { - SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Void>() { + public Map<String, BlobMetaData> listBlobsByPrefix(final @Nullable String prefix) throws IOException { + FileStatus[] files = store.execute(new Operation<FileStatus[]>() { @Override - public Void doInHdfs(FileSystem fs) throws IOException { - try (OutputStream stream = createOutput(blobName)) { - bytes.writeTo(stream); - } - return null; - } - }); - } - - private OutputStream createOutput(String blobName) throws IOException { - Path file = new Path(path, blobName); - // FSDataOutputStream does buffering internally - return blobStore.fileSystemFactory().getFileSystem().create(file, true, blobStore.bufferSizeInBytes()); - } - - @Override - public Map<String, BlobMetaData> listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException { - FileStatus[] files = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<FileStatus[]>() { - @Override - public FileStatus[] doInHdfs(FileSystem fs) throws IOException { - return fs.listStatus(path, new PathFilter() { @Override public boolean accept(Path path) { - return path.getName().startsWith(blobNamePrefix); + return prefix == null || path.getName().startsWith(prefix); } - }); + })); } }); - if (files == null || files.length == 0) { - return Collections.emptyMap(); - } Map<String, BlobMetaData> map = new LinkedHashMap<>(); for (FileStatus file : files) { map.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen())); @@ -155,19 +152,6 @@ } @Override public Map<String, BlobMetaData> listBlobs() throws IOException { - FileStatus[] files = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<FileStatus[]>() { - @Override - public FileStatus[] doInHdfs(FileSystem fs) throws IOException { - return fs.listStatus(path); - } - }); - if (files == null || files.length == 0) { - return Collections.emptyMap(); - } - Map<String, BlobMetaData> map = new LinkedHashMap<>(); - for (FileStatus file : files) { - map.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen())); - } - return Collections.unmodifiableMap(map); + return listBlobsByPrefix(null); } } \ No newline at end of file
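The CREATE-only write semantics in writeBlob above are easy to demonstrate against the FileContext API on its own. A minimal sketch, assuming a local file system stand-in and a made-up path; the second create is expected to fail precisely because CreateFlag.OVERWRITE is deliberately absent:

import java.io.IOException;
import java.util.EnumSet;

import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Path;

public class CreateFlagSketch {
    public static void main(String[] args) throws IOException {
        FileContext fc = FileContext.getLocalFSFileContext(); // local FS stands in for HDFS
        Path blob = new Path("/tmp/create-flag-sketch/blob-0"); // made-up path
        EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK);

        try (FSDataOutputStream out = fc.create(blob, flags, CreateOpts.createParent())) {
            out.write(new byte[] { 1, 2, 3 });
            out.hsync(); // force the bytes down, mirroring the per-write hsync above
        }

        try (FSDataOutputStream out = fc.create(blob, flags)) {
            out.write(4); // never reached
        } catch (FileAlreadyExistsException expected) {
            // CREATE without OVERWRITE refuses to clobber the existing file,
            // which is how writeBlob rejects duplicate blobs.
        }
    }
}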
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index b75485fa7fe..23404a7c360 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -18,46 +18,63 @@ */ package org.elasticsearch.repositories.hdfs; -import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileAlreadyExistsException; +import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.concurrent.Executor; +import java.lang.reflect.ReflectPermission; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; -public class HdfsBlobStore extends AbstractComponent implements BlobStore { +import javax.security.auth.AuthPermission; - private final FileSystemFactory ffs; - private final Path rootHdfsPath; - private final ThreadPool threadPool; - private final int bufferSizeInBytes; +final class HdfsBlobStore implements BlobStore { - public HdfsBlobStore(Settings settings, FileSystemFactory ffs, Path path, ThreadPool threadPool) throws IOException { - super(settings); - this.ffs = ffs; - this.rootHdfsPath = path; - this.threadPool = threadPool; + private final Path root; + private final FileContext fileContext; + private final int bufferSize; + private volatile boolean closed; - this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); - - mkdirs(path); + HdfsBlobStore(FileContext fileContext, String path, int bufferSize) throws IOException { + this.fileContext = fileContext; + this.bufferSize = bufferSize; + this.root = execute(new Operation<Path>() { + @Override + public Path run(FileContext fileContext) throws IOException { + return fileContext.makeQualified(new Path(path)); + } + }); + try { + mkdirs(root); + } catch (FileAlreadyExistsException ok) { + // behaves like Files.createDirectories + } } private void mkdirs(Path path) throws IOException { - SecurityUtils.execute(ffs, new FsCallback<Void>() { + execute(new Operation<Void>() { @Override - public Void doInHdfs(FileSystem fs) throws IOException { - if (!fs.exists(path)) { - fs.mkdirs(path); - } + public Void run(FileContext fileContext) throws IOException { + fileContext.mkdir(path, null, true); + return null; + } + }); + } + + @Override + public void delete(BlobPath path) throws IOException { + execute(new Operation<Void>() { + @Override + public Void run(FileContext fc) throws IOException {
fc.delete(translateToHdfsPath(path), true); return null; } }); @@ -65,45 +82,20 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore { @Override public String toString() { - return rootHdfsPath.toUri().toString(); - } - - public FileSystemFactory fileSystemFactory() { - return ffs; - } - - public Path path() { - return rootHdfsPath; - } - - public Executor executor() { - return threadPool.executor(ThreadPool.Names.SNAPSHOT); - } - - public int bufferSizeInBytes() { - return bufferSizeInBytes; + return root.toUri().toString(); } @Override public BlobContainer blobContainer(BlobPath path) { - return new HdfsBlobContainer(path, this, buildHdfsPath(path)); - } - - @Override - public void delete(BlobPath path) throws IOException { - SecurityUtils.execute(ffs, new FsCallback<Void>() { - @Override - public Void doInHdfs(FileSystem fs) throws IOException { - fs.delete(translateToHdfsPath(path), true); - return null; - } - }); + return new HdfsBlobContainer(path, this, buildHdfsPath(path), bufferSize); } private Path buildHdfsPath(BlobPath blobPath) { final Path path = translateToHdfsPath(blobPath); try { mkdirs(path); + } catch (FileAlreadyExistsException ok) { + // behaves like Files.createDirectories } catch (IOException ex) { throw new ElasticsearchException("failed to create blob container", ex); } @@ -111,15 +103,47 @@ } private Path translateToHdfsPath(BlobPath blobPath) { - Path path = path(); + Path path = root; for (String p : blobPath) { path = new Path(path, p); } return path; } + + interface Operation<V> { + V run(FileContext fileContext) throws IOException; + } + + /** + * Executes the provided operation against this store + */ + // we can do FS ops with only two elevated permissions: + // 1) hadoop dynamic proxy is messy with access rules + // 2) allow hadoop to add credentials to our Subject + <V> V execute(Operation<V> operation) throws IOException { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + // unprivileged code such as scripts do not have SpecialPermission + sm.checkPermission(new SpecialPermission()); + } + if (closed) { + throw new AlreadyClosedException("HdfsBlobStore is closed: " + this); + } + try { + return AccessController.doPrivileged(new PrivilegedExceptionAction<V>() { + @Override + public V run() throws IOException { + return operation.run(fileContext); + } + }, null, new ReflectPermission("suppressAccessChecks"), + new AuthPermission("modifyPrivateCredentials")); + } catch (PrivilegedActionException pae) { + throw (IOException) pae.getException(); + } + } @Override public void close() { - + closed = true; } } \ No newline at end of file
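The execute() helper above leans on the doPrivileged overload that takes an explicit permission list: the elevated section runs with at most the named permissions, not everything the plugin's policy grants. A stripped-down sketch of the same pattern with a stand-in action; the two permissions are the ones the diff itself names, and an action that throws anything other than IOException would break the cast in the catch block:

import java.io.IOException;
import java.lang.reflect.ReflectPermission;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

import javax.security.auth.AuthPermission;

public class ScopedPrivilegeSketch {
    static <V> V runScoped(PrivilegedExceptionAction<V> action) throws IOException {
        try {
            // null context plus explicit permissions: the action's privilege scope
            // is limited to these two permissions for the duration of the call.
            return AccessController.doPrivileged(action, null,
                    new ReflectPermission("suppressAccessChecks"),
                    new AuthPermission("modifyPrivateCredentials"));
        } catch (PrivilegedActionException pae) {
            // safe only because the action is declared to throw IOException alone
            throw (IOException) pae.getException();
        }
    }

    public static void main(String[] args) throws IOException {
        String result = runScoped(new PrivilegedExceptionAction<String>() {
            @Override
            public String run() throws IOException {
                return "file-system work would happen here";
            }
        });
        System.out.println(result);
    }
}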
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java new file mode 100644 index 00000000000..ccd0b405ff2 --- /dev/null +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java @@ -0,0 +1,100 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.hdfs; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; + +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.RepositoriesModule; + +// Code +public final class HdfsPlugin extends Plugin { + + // initialize some problematic classes with elevated privileges + static { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + AccessController.doPrivileged(new PrivilegedAction<Void>() { + @Override + public Void run() { + return evilHadoopInit(); + } + }); + } + + @SuppressForbidden(reason = "Needs a security hack for hadoop on windows, until HADOOP-XXXX is fixed") + private static Void evilHadoopInit() { + // hack: on Windows, Shell's clinit has a similar problem that on unix, + // but here we can workaround it for now by setting hadoop home + // on unix: we still want to set this to something we control, because + // if the user happens to have HADOOP_HOME in their environment -> checkHadoopHome goes boom + // TODO: remove THIS when hadoop is fixed + Path hadoopHome = null; + String oldValue = null; + try { + hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath(); + oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString()); + Class.forName("org.apache.hadoop.security.UserGroupInformation"); + Class.forName("org.apache.hadoop.util.StringUtils"); + Class.forName("org.apache.hadoop.util.ShutdownHookManager"); + Class.forName("org.apache.hadoop.conf.Configuration"); + Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants"); + Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck"); + } catch (ClassNotFoundException | IOException e) { + throw new RuntimeException(e); + } finally { + // try to clean up the hack + if (oldValue == null) { + System.clearProperty("hadoop.home.dir"); + } else { + System.setProperty("hadoop.home.dir", oldValue); + } + try { + // try to clean up our temp dir too if we can + if (hadoopHome != null) { + Files.delete(hadoopHome); + } + } catch (IOException thisIsBestEffort) {} + } + return null; + } + + @Override + public String name() { + return "repository-hdfs"; + } + + @Override + public String description() { + return "HDFS Repository Plugin"; + } + + public void onModule(RepositoriesModule repositoriesModule) { + repositoriesModule.registerRepository("hdfs", HdfsRepository.class, BlobStoreIndexShardRepository.class); + } +} diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 11081445fd4..1e8e267bd41 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ 
b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -19,23 +19,22 @@ package org.elasticsearch.repositories.hdfs; import java.io.IOException; -import java.net.MalformedURLException; +import java.lang.reflect.Constructor; import java.net.URI; -import java.net.URL; -import java.nio.file.Files; import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; +import java.security.Principal; +import java.security.PrivilegedAction; +import java.util.Collections; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; +import javax.security.auth.Subject; + import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.security.UserGroupInformation; -import org.elasticsearch.ElasticsearchException; +import org.apache.hadoop.fs.AbstractFileSystem; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Strings; @@ -43,190 +42,118 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.threadpool.ThreadPool; -public class HdfsRepository extends BlobStoreRepository implements FileSystemFactory { +public final class HdfsRepository extends BlobStoreRepository { - public final static String TYPE = "hdfs"; - - private final HdfsBlobStore blobStore; - private final BlobPath basePath; + private final BlobPath basePath = BlobPath.cleanPath(); + private final RepositorySettings repositorySettings; private final ByteSizeValue chunkSize; private final boolean compress; - private final RepositorySettings repositorySettings; - private FileSystem fs; + + private HdfsBlobStore blobStore; + + // buffer size passed to HDFS read/write methods + // TODO: why 100KB? 
+    private static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(100, ByteSizeUnit.KB);
 
     @Inject
-    public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ThreadPool threadPool) throws IOException {
+    public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException {
         super(name.getName(), repositorySettings, indexShardRepository);
-
         this.repositorySettings = repositorySettings;
         String path = repositorySettings.settings().get("path", settings.get("path"));
-        if (path == null) {
-            throw new IllegalArgumentException("no 'path' defined for hdfs snapshot/restore");
-        }
-
-        // get configuration
-        fs = getFileSystem();
-        Path hdfsPath = SecurityUtils.execute(fs, new FsCallback<Path>() {
-            @Override
-            public Path doInHdfs(FileSystem fs) throws IOException {
-                return fs.makeQualified(new Path(path));
-            }
-        });
-        this.basePath = BlobPath.cleanPath();
-
-        logger.debug("Using file-system [{}] for URI [{}], path [{}]", fs, fs.getUri(), hdfsPath);
-        blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool);
-        this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null));
-        this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false));
+        this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", null);
+        this.compress = repositorySettings.settings().getAsBoolean("compress", false);
     }
 
-    // as the FileSystem is long-lived and might go away, make sure to check it before it's being used.
     @Override
-    public FileSystem getFileSystem() throws IOException {
-        SecurityManager sm = System.getSecurityManager();
-        if (sm != null) {
-            // unprivileged code such as scripts do not have SpecialPermission
-            sm.checkPermission(new SpecialPermission());
+    protected void doStart() {
+        String uriSetting = repositorySettings.settings().get("uri");
+        if (Strings.hasText(uriSetting) == false) {
+            throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore");
         }
+        URI uri = URI.create(uriSetting);
+        if ("hdfs".equalsIgnoreCase(uri.getScheme()) == false) {
+            throw new IllegalArgumentException(
+                String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", uri.getScheme(), uriSetting));
+        }
+        if (Strings.hasLength(uri.getPath()) && uri.getPath().equals("/") == false) {
+            throw new IllegalArgumentException(String.format(Locale.ROOT,
+                "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", uri.getPath(), uriSetting));
+        }
+
+        String pathSetting = repositorySettings.settings().get("path");
+        // get configuration
+        if (pathSetting == null) {
+            throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore");
+        }
+
+        int bufferSize = repositorySettings.settings().getAsBytesSize("buffer_size", DEFAULT_BUFFER_SIZE).bytesAsInt();
 
         try {
-            return AccessController.doPrivileged(new PrivilegedExceptionAction<FileSystem>() {
+            // initialize our filecontext
+            SecurityManager sm = System.getSecurityManager();
+            if (sm != null) {
+                sm.checkPermission(new SpecialPermission());
+            }
+            FileContext fileContext = AccessController.doPrivileged(new PrivilegedAction<FileContext>() {
                 @Override
-                public FileSystem run() throws IOException {
-                    return doGetFileSystem();
+                public FileContext run() {
+                    return createContext(uri, repositorySettings);
                 }
-            }, SecurityUtils.AccBridge.acc());
-        } catch (PrivilegedActionException pae) {
-            Throwable th = pae.getCause();
-            if (th instanceof Error) {
-                throw (Error) th;
-            }
-            if (th instanceof RuntimeException) {
-                throw (RuntimeException) th;
-            }
-            if (th instanceof IOException) {
-                throw (IOException) th;
-            }
-            throw new ElasticsearchException(pae);
+            });
+            blobStore = new HdfsBlobStore(fileContext, pathSetting, bufferSize);
+            logger.debug("Using file-system [{}] for URI [{}], path [{}]", fileContext.getDefaultFileSystem(), fileContext.getDefaultFileSystem().getUri(), pathSetting);
+        } catch (IOException e) {
+            throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", uri), e);
         }
+        super.doStart();
     }
-
-    private FileSystem doGetFileSystem() throws IOException {
-        // check if the fs is still alive
-        // make a cheap call that triggers little to no security checks
-        if (fs != null) {
-            try {
-                fs.isFile(fs.getWorkingDirectory());
-            } catch (IOException ex) {
-                if (ex.getMessage().contains("Filesystem closed")) {
-                    fs = null;
-                }
-                else {
-                    throw ex;
-                }
-            }
-        }
-        if (fs == null) {
-            Thread th = Thread.currentThread();
-            ClassLoader oldCL = th.getContextClassLoader();
-            try {
-                th.setContextClassLoader(getClass().getClassLoader());
-                return initFileSystem(repositorySettings);
-            } catch (IOException ex) {
-                throw ex;
-            } finally {
-                th.setContextClassLoader(oldCL);
-            }
-        }
-        return fs;
-    }
-
-    private FileSystem initFileSystem(RepositorySettings repositorySettings) throws IOException {
-
-        Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", settings.getAsBoolean("load_defaults", true)));
-        cfg.setClassLoader(this.getClass().getClassLoader());
+
+    // create hadoop filecontext
+    @SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)")
+    private static FileContext createContext(URI uri, RepositorySettings repositorySettings) {
+        Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", true));
+        cfg.setClassLoader(HdfsRepository.class.getClassLoader());
         cfg.reloadConfiguration();
 
-        String confLocation = repositorySettings.settings().get("conf_location", settings.get("conf_location"));
-        if (Strings.hasText(confLocation)) {
-            for (String entry : Strings.commaDelimitedListToStringArray(confLocation)) {
-                addConfigLocation(cfg, entry.trim());
-            }
-        }
-
         Map<String, String> map = repositorySettings.settings().getByPrefix("conf.").getAsMap();
         for (Entry<String, String> entry : map.entrySet()) {
             cfg.set(entry.getKey(), entry.getValue());
         }
 
+        // create a hadoop user. if we want some auth, it must be done differently anyway, and tested.
+        Subject subject;
         try {
-            UserGroupInformation.setConfiguration(cfg);
-        } catch (Throwable th) {
-            throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot initialize Hadoop"), th);
+            Class<?> clazz = Class.forName("org.apache.hadoop.security.User");
+            Constructor<?> ctor = clazz.getConstructor(String.class);
+            ctor.setAccessible(true);
+            Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name"));
+            subject = new Subject(false, Collections.singleton(principal), Collections.emptySet(), Collections.emptySet());
+        } catch (ReflectiveOperationException e) {
+            throw new RuntimeException(e);
         }
 
-        String uri = repositorySettings.settings().get("uri", settings.get("uri"));
-        URI actualUri = (uri != null ? URI.create(uri) : FileSystem.getDefaultUri(cfg));
-        String user = repositorySettings.settings().get("user", settings.get("user"));
 
+        // disable FS cache
+        cfg.setBoolean("fs.hdfs.impl.disable.cache", true);
 
-        try {
-            // disable FS cache
-            String disableFsCache = String.format(Locale.ROOT, "fs.%s.impl.disable.cache", actualUri.getScheme());
-            cfg.setBoolean(disableFsCache, true);
-
-            return (user != null ? FileSystem.get(actualUri, cfg, user) : FileSystem.get(actualUri, cfg));
-        } catch (Exception ex) {
-            throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create Hdfs file-system for uri [%s]", actualUri), ex);
-        }
-    }
-
-    @SuppressForbidden(reason = "pick up Hadoop config (which can be on HDFS)")
-    private void addConfigLocation(Configuration cfg, String confLocation) {
-        URL cfgURL = null;
-        // it's an URL
-        if (!confLocation.contains(":")) {
-            cfgURL = cfg.getClassLoader().getResource(confLocation);
-
-            // fall back to file
-            if (cfgURL == null) {
-                java.nio.file.Path path = PathUtils.get(confLocation);
-                if (!Files.isReadable(path)) {
-                    throw new IllegalArgumentException(
-                        String.format(Locale.ROOT,
-                            "Cannot find classpath resource or file 'conf_location' [%s] defined for hdfs snapshot/restore",
-                            confLocation));
+        // create the filecontext with our user
+        return Subject.doAs(subject, new PrivilegedAction<FileContext>() {
+            @Override
+            public FileContext run() {
+                try {
+                    AbstractFileSystem fs = AbstractFileSystem.get(uri, cfg);
+                    return FileContext.getFileContext(fs, cfg);
+                } catch (UnsupportedFileSystemException e) {
+                    throw new RuntimeException(e);
                 }
-                String pathLocation = path.toUri().toString();
-                logger.debug("Adding path [{}] as file [{}]", confLocation, pathLocation);
-                confLocation = pathLocation;
             }
-            else {
-                logger.debug("Resolving path [{}] to classpath [{}]", confLocation, cfgURL);
-            }
-        }
-        else {
-            logger.debug("Adding path [{}] as URL", confLocation);
-        }
-
-        if (cfgURL == null) {
-            try {
-                cfgURL = new URL(confLocation);
-            } catch (MalformedURLException ex) {
-                throw new IllegalArgumentException(String.format(Locale.ROOT,
-                    "Invalid 'conf_location' URL [%s] defined for hdfs snapshot/restore", confLocation), ex);
-            }
-        }
-
-        cfg.addResource(cfgURL);
+        });
     }
 
@@ -248,12 +175,4 @@ public class HdfsRepository extends BlobStoreRepository implements FileSystemFac
     protected ByteSizeValue chunkSize() {
         return chunkSize;
     }
-
-    @Override
-    protected void doClose() throws ElasticsearchException {
-        super.doClose();
-
-        IOUtils.closeStream(fs);
-        fs = null;
-    }
-}
\ No newline at end of file
+}
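To make the new doStart validation concrete: uri must be a bare hdfs:// authority (an empty path or "/" only), and the remote directory moves into the separate path setting. A hypothetical registration that passes those checks, written against the client API the tests below use (the namenode address is invented):

    // Hypothetical values; "namenode:8020" is a placeholder address.
    client.admin().cluster().preparePutRepository("my_hdfs_backup")
        .setType("hdfs")
        .setSettings(Settings.settingsBuilder()
            .put("uri", "hdfs://namenode:8020")                // scheme must be hdfs, no path component
            .put("path", "elasticsearch/repositories/backup")  // the remote directory goes here instead
            .put("buffer_size", "128k")                        // optional; defaults to the 100KB above
            .put("compress", true))
        .get();

diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java
deleted file mode 100644
index 6a0d4ffa818..00000000000
--- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/SecurityUtils.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.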
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.repositories.hdfs;
-
-import java.io.IOException;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.SpecialPermission;
-import org.elasticsearch.plugin.hadoop.hdfs.Utils;
-
-class SecurityUtils {
-
-    abstract static class AccBridge extends Utils {
-        static AccessControlContext acc() {
-            return Utils.hadoopACC();
-        }
-    }
-
-    static <V> V execute(FileSystemFactory ffs, FsCallback<V> callback) throws IOException {
-        return execute(ffs.getFileSystem(), callback);
-    }
-
-    static <V> V execute(FileSystem fs, FsCallback<V> callback) throws IOException {
-        SecurityManager sm = System.getSecurityManager();
-        if (sm != null) {
-            // unprivileged code such as scripts do not have SpecialPermission
-            sm.checkPermission(new SpecialPermission());
-        }
-
-        try {
-            return AccessController.doPrivileged(new PrivilegedExceptionAction<V>() {
-                @Override
-                public V run() throws IOException {
-                    return callback.doInHdfs(fs);
-                }
-            }, AccBridge.acc());
-        } catch (PrivilegedActionException pae) {
-            Throwable th = pae.getCause();
-            if (th instanceof Error) {
-                throw (Error) th;
-            }
-            if (th instanceof RuntimeException) {
-                throw (RuntimeException) th;
-            }
-            if (th instanceof IOException) {
-                throw (IOException) th;
-            }
-            throw new ElasticsearchException(pae);
-        }
-    }
-}
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/TestingFs.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/TestingFs.java
deleted file mode 100644
index 46cb0a263fe..00000000000
--- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/TestingFs.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.repositories.hdfs;
-
-import org.apache.hadoop.fs.LocalFileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.elasticsearch.common.SuppressForbidden;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Extends LFS to improve some operations to keep the security permissions at
- * bay. In particular mkdir is smarter and doesn't have to walk all the file
- * hierarchy but rather only limits itself to the parent/working dir and creates
- * a file only when necessary.
- */
-public class TestingFs extends LocalFileSystem {
-
-    private static class ImprovedRawLocalFileSystem extends RawLocalFileSystem {
-        @Override
-        @SuppressForbidden(reason = "the Hadoop API depends on java.io.File")
-        public boolean mkdirs(Path f) throws IOException {
-            File wd = pathToFile(getWorkingDirectory());
-            File local = pathToFile(f);
-            if (wd.equals(local) || local.exists()) {
-                return true;
-            }
-            return mkdirs(f.getParent()) && local.mkdir();
-        }
-    }
-
-    public TestingFs() {
-        super(new ImprovedRawLocalFileSystem());
-        // use the build path instead of the starting dir as that one has read permissions
-        //setWorkingDirectory(new Path(getClass().getProtectionDomain().getCodeSource().getLocation().toString()));
-        setWorkingDirectory(new Path(System.getProperty("java.io.tmpdir")));
-    }
-}
diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy
index d26acd121e4..85447245c96 100644
--- a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy
+++ b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy
@@ -18,50 +18,21 @@
  */
 
 grant {
-  // used by the plugin to get the TCCL to properly initialize all of Hadoop components
+  // Hadoop UserGroupInformation, HdfsConstants, PipelineAck clinit
   permission java.lang.RuntimePermission "getClassLoader";
 
-  // used for DomainCombiner
-  permission java.security.SecurityPermission "createAccessControlContext";
-
-  // set TCCL used for bootstrapping Hadoop Configuration and JAAS
-  permission java.lang.RuntimePermission "setContextClassLoader";
-
-  //
-  // Hadoop 1
-  //
-
-  // UserGroupInformation (UGI)
+  // UserGroupInformation (UGI) Metrics clinit
+  permission java.lang.RuntimePermission "accessDeclaredMembers";
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
 
-  // UGI triggers JAAS
-  permission javax.security.auth.AuthPermission "getSubject";
-
-  // JAAS libraries are not loaded with the proper context in Hadoop, hence why the permission is needed here
-  permission java.lang.RuntimePermission "loadLibrary.jaas_nt";
-
-  // which triggers the use of the Kerberos library
-  permission java.lang.RuntimePermission "accessClassInPackage.sun.security.krb5";
-
-  // plus LoginContext
-  permission javax.security.auth.AuthPermission "modifyPrincipals";
-
-  permission javax.security.auth.AuthPermission "modifyPublicCredentials";
-
-  permission javax.security.auth.AuthPermission "modifyPrivateCredentials";
-
-  //
-  // Hadoop 2
-  //
-
-  // UGI (Ugi Metrics)
-  permission java.lang.RuntimePermission "accessDeclaredMembers";
-
-  // Shell initialization - reading system props
+  // org.apache.hadoop.util.StringUtils clinit
   permission java.util.PropertyPermission "*", "read,write";
+
+  // org.apache.hadoop.util.ShutdownHookManager clinit
+  permission java.lang.RuntimePermission "shutdownHooks";
 
-  permission javax.security.auth.PrivateCredentialPermission "org.apache.hadoop.security.Credentials \"*\"", "read";
-
-  // HftpFileSystem (all present FS are loaded and initialized at startup ...)
-  permission java.lang.RuntimePermission "setFactory";
-};
\ No newline at end of file
+  // JAAS is used always, we use a fake subject, hurts nobody
+  permission javax.security.auth.AuthPermission "getSubject";
+  permission javax.security.auth.AuthPermission "doAs";
+  permission javax.security.auth.AuthPermission "modifyPrivateCredentials";
+};
diff --git a/plugins/repository-hdfs/src/main/resources/hadoop-libs/README.asciidoc b/plugins/repository-hdfs/src/main/resources/hadoop-libs/README.asciidoc
deleted file mode 100644
index e9f85f3cdf7..00000000000
--- a/plugins/repository-hdfs/src/main/resources/hadoop-libs/README.asciidoc
+++ /dev/null
@@ -1 +0,0 @@
-Folder containing the required Hadoop client libraries and dependencies.
\ No newline at end of file
diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java
deleted file mode 100644
index 4b4e2aa05ef..00000000000
--- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTestPlugin.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package org.elasticsearch.plugin.hadoop.hdfs;
-
-import java.net.URL;
-import java.util.Collections;
-import java.util.List;
-
-import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin;
-
-public class HdfsTestPlugin extends HdfsPlugin {
-
-    @Override
-    protected List<URL> getHadoopClassLoaderPath(String baseLib) {
-        return Collections.emptyList();
-    }
-}
diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFSCluster.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFSCluster.java
deleted file mode 100644
index 0d700615a1a..00000000000
--- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/MiniHDFSCluster.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - -package org.elasticsearch.plugin.hadoop.hdfs; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.apache.hadoop.hdfs.server.datanode.DataNode; -import org.elasticsearch.common.SuppressForbidden; - -import java.io.File; - -public class MiniHDFSCluster { - - @SuppressForbidden(reason = "Hadoop is messy") - public static void main(String[] args) throws Exception { - FileUtil.fullyDelete(new File(System.getProperty("test.build.data", "build/test/data"), "dfs/")); - // MiniHadoopClusterManager.main(new String[] { "-nomr" }); - Configuration cfg = new Configuration(); - cfg.set(DataNode.DATA_DIR_PERMISSION_KEY, "666"); - cfg.set("dfs.replication", "0"); - MiniDFSCluster dfsCluster = new MiniDFSCluster(cfg, 1, true, null); - FileSystem fs = dfsCluster.getFileSystem(); - System.out.println(fs.getClass()); - System.out.println(fs.getUri()); - System.out.println(dfsCluster.getHftpFileSystem().getClass()); - - // dfsCluster.shutdown(); - } -} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java deleted file mode 100644 index 2f492eee343..00000000000 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/UtilsTests.java +++ /dev/null @@ -1,11 +0,0 @@ -package org.elasticsearch.plugin.hadoop.hdfs; - -import org.elasticsearch.test.ESTestCase; - -public class UtilsTests extends ESTestCase { - - public void testDetectLibFolder() { - String location = HdfsPlugin.class.getProtectionDomain().getCodeSource().getLocation().toString(); - assertEquals(location, Utils.detectLibFolder()); - } -} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java similarity index 50% rename from plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java rename to plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java index fd87e18cbce..db423cdd44f 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java @@ -1,17 +1,35 @@ -package org.elasticsearch.plugin.hadoop.hdfs; +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.repositories.hdfs;
 
 import java.io.IOException;
 import java.util.Collection;
 
-import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin;
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.repositories.hdfs.HdfsPlugin;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.test.rest.RestTestCandidate;
 import org.elasticsearch.test.rest.parser.RestTestParseException;
 
-import com.carrotsearch.randomizedtesting.annotations.Name;
-import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 public class HdfsRepositoryRestIT extends ESRestTestCase {
 
     @Override
diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java
similarity index 54%
rename from plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java
rename to plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java
index d1b23e92538..0e838d17fd3 100644
--- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsTests.java
+++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.plugin.hadoop.hdfs;
+package org.elasticsearch.repositories.hdfs;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
@@ -31,89 +31,43 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.repositories.RepositoryException;
-import org.elasticsearch.repositories.RepositoryMissingException;
-import org.elasticsearch.repositories.hdfs.TestingFs;
+import org.elasticsearch.repositories.hdfs.HdfsPlugin;
 import org.elasticsearch.snapshots.SnapshotState;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
-import org.elasticsearch.test.ESIntegTestCase.Scope;
-import org.elasticsearch.test.ESIntegTestCase.ThirdParty;
-import org.elasticsearch.test.store.MockFSDirectoryService;
-import org.junit.After;
-import org.junit.Before;
+import org.elasticsearch.test.ESSingleNodeTestCase;
 
-/**
- * You must specify {@code -Dtests.thirdparty=true}
- */
-@ThirdParty
-@ClusterScope(scope = Scope.SUITE, numDataNodes = 1, transportClientRatio = 0.0)
-public class HdfsTests extends ESIntegTestCase {
+public class HdfsTests extends ESSingleNodeTestCase {
 
     @Override
-    public Settings indexSettings() {
-        return Settings.builder()
-            .put(super.indexSettings())
-            .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false)
-            .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false)
-            .build();
-    }
-
-    @Override
-    protected Settings nodeSettings(int ordinal) {
-        Settings.Builder settings = Settings.builder()
-            .put(super.nodeSettings(ordinal))
-            .put("path.home", createTempDir())
-            .put("path.repo", "")
-            .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false)
-            .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false);
-        return settings.build();
-    }
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return pluginList(HdfsTestPlugin.class);
-    }
-
-    private String path;
-
-    @Before
-    public final void wipeBefore() throws Exception {
-        wipeRepositories();
-        path = "build/data/repo-" + randomInt();
-    }
-
-    @After
-    public final void wipeAfter() throws Exception {
-        wipeRepositories();
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(HdfsPlugin.class);
     }
 
     public void testSimpleWorkflow() {
         Client client = client();
-        logger.info("--> creating hdfs repository with path [{}]", path);
         PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
             .setType("hdfs")
             .setSettings(Settings.settingsBuilder()
-                //.put("uri", "hdfs://127.0.0.1:51227")
-                .put("conf.fs.es-hdfs.impl", TestingFs.class.getName())
-                .put("uri", "es-hdfs://./build/")
-                .put("path", path)
-                .put("conf", "additional-cfg.xml, conf-2.xml")
+                .put("uri", "hdfs:///")
+                .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
+                .put("path", "foo")
                 .put("chunk_size", randomIntBetween(100, 1000) + "k")
                 .put("compress", randomBoolean())
             ).get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
 
-        createIndex("test-idx-1", "test-idx-2", "test-idx-3");
+        createIndex("test-idx-1");
+        createIndex("test-idx-2");
+        createIndex("test-idx-3");
         ensureGreen();
 
         logger.info("--> indexing some data");
         for (int i = 0; i < 100; i++) {
-            index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i);
-            index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i);
-            index("test-idx-3", "doc", Integer.toString(i), "foo", "baz" + i);
+            client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get();
+            client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get();
+            client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get();
         }
-        refresh();
+        client().admin().indices().prepareRefresh().get();
         assertThat(count(client, "test-idx-1"), equalTo(100L));
         assertThat(count(client, "test-idx-2"), equalTo(100L));
         assertThat(count(client, "test-idx-3"), equalTo(100L));
@@ -135,7 +89,7 @@ public class HdfsTests extends ESIntegTestCase {
         for (int i = 0; i < 100; i += 2) {
             client.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get();
         }
-        refresh();
+        client().admin().indices().prepareRefresh().get();
         assertThat(count(client, "test-idx-1"), equalTo(50L));
         assertThat(count(client, "test-idx-2"), equalTo(50L));
         assertThat(count(client, "test-idx-3"), equalTo(50L));
@@ -154,7 +108,7 @@ public class HdfsTests extends ESIntegTestCase {
 
         // Test restore after index deletion
         logger.info("--> delete indices");
-        wipeIndices("test-idx-1", "test-idx-2");
+        client().admin().indices().prepareDelete("test-idx-1", "test-idx-2").get();
         logger.info("--> restore one index after deletion");
         restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-2").execute().actionGet();
         assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
@@ -165,54 +119,71 @@ public class HdfsTests extends ESIntegTestCase {
         assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false));
     }
 
-    private void wipeIndices(String...
indices) { - cluster().wipeIndices(indices); - } - - // RepositoryVerificationException.class - public void testWrongPath() { - Client client = client(); - logger.info("--> creating hdfs repository with path [{}]", path); - + public void testMissingUri() { try { - PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") - .setType("hdfs") - .setSettings(Settings.settingsBuilder() - // .put("uri", "hdfs://127.0.0.1:51227/") - .put("conf.fs.es-hdfs.impl", TestingFs.class.getName()) - .put("uri", "es-hdfs:///") - .put("path", path + "a@b$c#11:22") - .put("chunk_size", randomIntBetween(100, 1000) + "k") - .put("compress", randomBoolean())) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - - createIndex("test-idx-1", "test-idx-2", "test-idx-3"); - ensureGreen(); - fail("Path name is invalid"); - } catch (RepositoryException re) { - // expected + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.EMPTY).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage().contains("No 'uri' defined for hdfs")); } } - /** - * Deletes repositories, supports wildcard notation. - */ - public static void wipeRepositories(String... repositories) { - // if nothing is provided, delete all - if (repositories.length == 0) { - repositories = new String[]{"*"}; + public void testEmptyUri() { + try { + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.builder() + .put("uri", "/path").build()).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage(), e.getCause().getMessage().contains("Invalid scheme [null] specified in uri [/path]")); } - for (String repository : repositories) { - try { - client().admin().cluster().prepareDeleteRepository(repository).execute().actionGet(); - } catch (RepositoryMissingException ex) { - // ignore - } + } + + public void testNonHdfsUri() { + try { + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.builder() + .put("uri", "file:///").build()).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage().contains("Invalid scheme [file] specified in uri [file:///]")); + } + } + + public void testPathSpecifiedInHdfs() { + try { + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.builder() + .put("uri", "hdfs:///some/path").build()).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage().contains("Use 'path' option to specify a path [/some/path]")); + } + } + + public void testMissingPath() { + try { + client().admin().cluster().preparePutRepository("test-repo") + .setType("hdfs") + .setSettings(Settings.builder() + .put("uri", "hdfs:///").build()).get(); + fail(); + } catch (RepositoryException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertTrue(e.getCause().getMessage().contains("No 'path' defined for hdfs")); } } private long count(Client client, String index) { return client.prepareSearch(index).setSize(0).get().getHits().totalHits(); } -} \ No newline at end of file +} diff --git 
a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/TestingFs.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/TestingFs.java new file mode 100644 index 00000000000..c9c3c46c12c --- /dev/null +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/TestingFs.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.hdfs; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.DelegateToFileSystem; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.RawLocalFileSystem; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.lucene.util.LuceneTestCase; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Path; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.spi.FileSystemProvider; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; + +/** + * Extends LFS to improve some operations to keep the security permissions at + * bay. In particular it never tries to execute! + */ +public class TestingFs extends DelegateToFileSystem { + + // wrap hadoop rawlocalfilesystem to behave less crazy + static RawLocalFileSystem wrap(final Path base) { + final FileSystemProvider baseProvider = base.getFileSystem().provider(); + return new RawLocalFileSystem() { + + private org.apache.hadoop.fs.Path box(Path path) { + return new org.apache.hadoop.fs.Path(path.toUri()); + } + + private Path unbox(org.apache.hadoop.fs.Path path) { + return baseProvider.getPath(path.toUri()); + } + + @Override + protected org.apache.hadoop.fs.Path getInitialWorkingDirectory() { + return box(base); + } + + @Override + public void setPermission(org.apache.hadoop.fs.Path path, FsPermission permission) { + // no execution, thank you very much! + } + + // pretend we don't support symlinks (which causes hadoop to want to do crazy things), + // returning the boolean does not seem to really help, link-related operations are still called. 
+
+        @Override
+        public boolean supportsSymlinks() {
+            return false;
+        }
+
+        @Override
+        public FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path path) throws IOException {
+            return getFileStatus(path);
+        }
+
+        @Override
+        public org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path path) throws IOException {
+            return path;
+        }
+
+        @Override
+        public FileStatus getFileStatus(org.apache.hadoop.fs.Path path) throws IOException {
+            BasicFileAttributes attributes;
+            try {
+                attributes = Files.readAttributes(unbox(path), BasicFileAttributes.class);
+            } catch (NoSuchFileException e) {
+                // unfortunately, specific exceptions are not guaranteed. don't wrap hadoop over a zip filesystem or something.
+                FileNotFoundException fnfe = new FileNotFoundException("File " + path + " does not exist");
+                fnfe.initCause(e);
+                throw fnfe;
+            }
+
+            // we set similar values to raw local filesystem, except we are never a symlink
+            long length = attributes.size();
+            boolean isDir = attributes.isDirectory();
+            int blockReplication = 1;
+            long blockSize = getDefaultBlockSize(path);
+            long modificationTime = attributes.creationTime().toMillis();
+            return new FileStatus(length, isDir, blockReplication, blockSize, modificationTime, path);
+        }
+    };
+  }
+
+  public TestingFs(URI uri, Configuration configuration) throws URISyntaxException, IOException {
+      super(URI.create("file:///"), wrap(LuceneTestCase.createTempDir()), configuration, "file", false);
+  }
+
+  @Override
+  public void checkPath(org.apache.hadoop.fs.Path path) {
+    // we do evil stuff, we admit it.
+  }
+}
diff --git a/plugins/repository-hdfs/src/test/resources/additional-cfg.xml b/plugins/repository-hdfs/src/test/resources/additional-cfg.xml
deleted file mode 100644
index b1b6611e924..00000000000
--- a/plugins/repository-hdfs/src/test/resources/additional-cfg.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0"?>
-<configuration>
-  <property>
-    <name>foo</name>
-    <value>foo</value>
-  </property>
-
-  <property>
-    <name>paradise</name>
-    <value>lost</value>
-  </property>
-</configuration>
diff --git a/plugins/repository-hdfs/src/test/resources/conf-2.xml b/plugins/repository-hdfs/src/test/resources/conf-2.xml
deleted file mode 100644
index b1b6611e924..00000000000
--- a/plugins/repository-hdfs/src/test/resources/conf-2.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0"?>
-<configuration>
-  <property>
-    <name>foo</name>
-    <value>foo</value>
-  </property>
-
-  <property>
-    <name>paradise</name>
-    <value>lost</value>
-  </property>
-</configuration>
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml
index b7bc644a832..7c569408a61 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml
@@ -2,7 +2,7 @@
 #
 # Check plugin is installed
 #
-"HDFS Repository loaded":
+"Plugin loaded":
 
   - do:
       cluster.state: {}
@@ -14,3 +14,18 @@
 
   - match: { nodes.$master.plugins.0.name: repository-hdfs }
   - match: { nodes.$master.plugins.0.jvm: true }
+
+---
+#
+# Check that we can't use file:// repositories or anything like that
+# We only test this plugin against hdfs://
+#
+"HDFS only":
+  - do:
+      catch: /Invalid scheme/
+      snapshot.create_repository:
+        repository: misconfigured_repository
+        body:
+          type: hdfs
+          settings:
+            uri: "file://bogus"
+            path: "foo/bar"
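With the conf_location mechanism and its two XML fixtures above gone, arbitrary Hadoop client settings now travel through the conf. settings prefix, which createContext copies verbatim into the Hadoop Configuration. A hypothetical sketch (the dfs.* key is illustrative, not something the plugin requires):

    // Hypothetical: any key under "conf." is forwarded via cfg.set(key, value).
    Settings repositorySettings = Settings.settingsBuilder()
        .put("uri", "hdfs://namenode:8020")   // placeholder address
        .put("path", "foo/bar")
        .put("conf.dfs.client.retry.policy.enabled", "false")
        .build();

diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.disabled b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.disabled
deleted file mode 100644
index f1f5f7a65e0..00000000000
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.disabled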
+++ /dev/null
@@ -1,25 +0,0 @@
-# Integration tests for HDFS Repository plugin
-#
-# Check plugin is installed
-#
-"HDFS Repository Config":
-  - do:
-      snapshot.create_repository:
-        repository: test_repo_hdfs_1
-        verify: false
-        body:
-          type: hdfs
-          settings:
-            # local HDFS implementation
-            conf.fs.es-hdfs.impl: "org.elasticsearch.repositories.hdfs.TestingFs"
-            uri: "es-hdfs://./build/"
-            path: "build/data/repo-hdfs"
-
-  # Get repository
-  - do:
-      snapshot.get_repository:
-        repository: test_repo_hdfs_1
-
-  - is_true: test_repo_hdfs_1
-  - is_true: test_repo_hdfs_1.settings.uri
-  - match: {test_repo_hdfs_1.settings.path : "build/data/repo-hdfs"}
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml
new file mode 100644
index 00000000000..0f942dfdc03
--- /dev/null
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yaml
@@ -0,0 +1,27 @@
+# Integration tests for HDFS Repository plugin
+#
+# Tests creating a repository
+#
+"HDFS Repository Creation":
+  # Create repository
+  - do:
+      snapshot.create_repository:
+        repository: test_repository_create
+        body:
+          type: hdfs
+          settings:
+            uri: "hdfs://localhost:9999"
+            path: "test/repository_create"
+
+  # Get repository
+  - do:
+      snapshot.get_repository:
+        repository: test_repository_create
+
+  - is_true: test_repository_create
+  - match: {test_repository_create.settings.path : "test/repository_create"}
+
+  # Remove our repository
+  - do:
+      snapshot.delete_repository:
+        repository: test_repository_create
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yaml
new file mode 100644
index 00000000000..34c770a8074
--- /dev/null
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yaml
@@ -0,0 +1,50 @@
+# Integration tests for HDFS Repository plugin
+#
+# Tests creating a repository, then deleting it and creating it again.
+# +"HDFS Delete Repository": + # Create repository + - do: + snapshot.create_repository: + repository: test_repo_hdfs_1 + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "foo/bar" + + # Get repository + - do: + snapshot.get_repository: + repository: test_repo_hdfs_1 + + - is_true: test_repo_hdfs_1 + - match: {test_repo_hdfs_1.settings.path : "foo/bar"} + + # Delete repository + - do: + snapshot.delete_repository: + repository: test_repo_hdfs_1 + + # Get repository: It should be gone + - do: + catch: /repository_missing_exception/ + snapshot.get_repository: + repository: test_repo_hdfs_1 + + # Create it again + - do: + snapshot.create_repository: + repository: test_repo_hdfs_1 + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "foo/bar" + + # Get repository again + - do: + snapshot.get_repository: + repository: test_repo_hdfs_1 + + - is_true: test_repo_hdfs_1 diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yaml new file mode 100644 index 00000000000..d1695b00d9d --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yaml @@ -0,0 +1,23 @@ +# Integration tests for HDFS Repository plugin +# +# Tests explicit verify +# +"HDFS Repository Verify": + - do: + snapshot.create_repository: + repository: test_repository_verify + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "test/repository_verify" + + # Verify repository + - do: + snapshot.verify_repository: + repository: test_repository_verify + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_repository_verify diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml new file mode 100644 index 00000000000..7db9a429230 --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -0,0 +1,53 @@ +# Integration tests for HDFS Repository plugin +# +# Actually perform a snapshot to hdfs +# +--- +"take snapshot": + # Create repository + - do: + snapshot.create_repository: + repository: test_snapshot_repository + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "test/snapshot" + + # Create index + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + + # Wait for yellow + - do: + cluster.health: + wait_for_status: yellow + + # Create snapshot + - do: + snapshot.create: + repository: test_snapshot_repository + snapshot: test_snapshot + wait_for_completion: true + + - match: { snapshot.snapshot: test_snapshot } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.successful: 1 } + - match: { snapshot.shards.failed : 0 } + + # Remove our snapshot + - do: + snapshot.delete: + repository: test_snapshot_repository + snapshot: test_snapshot + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_snapshot_repository + diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml new file mode 100644 index 00000000000..f38f4783b19 --- /dev/null +++ 
b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yaml @@ -0,0 +1,70 @@ +# Integration tests for HDFS Repository plugin +# +# Tests retrieving information about snapshot +# +--- +"Get a snapshot": + # Create repository + - do: + snapshot.create_repository: + repository: test_snapshot_get_repository + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "test/snapshot_get" + + # Create index + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + # Wait for green + - do: + cluster.health: + wait_for_status: green + + # Create snapshot + - do: + snapshot.create: + repository: test_snapshot_get_repository + snapshot: test_snapshot_get + wait_for_completion: true + + - match: { snapshot.snapshot: test_snapshot_get } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.successful: 1 } + - match: { snapshot.shards.failed : 0 } + + # Get snapshot info + - do: + snapshot.get: + repository: test_snapshot_get_repository + snapshot: test_snapshot_get + + - length: { snapshots: 1 } + - match: { snapshots.0.snapshot : test_snapshot_get } + + # List snapshot info + - do: + snapshot.get: + repository: test_snapshot_get_repository + snapshot: "*" + + - length: { snapshots: 1 } + - match: { snapshots.0.snapshot : test_snapshot_get } + + # Remove our snapshot + - do: + snapshot.delete: + repository: test_snapshot_get_repository + snapshot: test_snapshot_get + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_snapshot_get_repository diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml new file mode 100644 index 00000000000..47559226275 --- /dev/null +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml @@ -0,0 +1,79 @@ +# Integration tests for HDFS Repository plugin +# +# Actually perform a snapshot to hdfs, then restore it +# +--- +"Create a snapshot and then restore it": + + # Create repository + - do: + snapshot.create_repository: + repository: test_restore_repository + body: + type: hdfs + settings: + uri: "hdfs://localhost:9999" + path: "test/restore" + + # Create index + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + # Wait for green + - do: + cluster.health: + wait_for_status: green + + # Take snapshot + - do: + snapshot.create: + repository: test_restore_repository + snapshot: test_restore + wait_for_completion: true + + - match: { snapshot.snapshot: test_restore } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.successful: 1 } + - match: { snapshot.shards.failed : 0 } + - is_true: snapshot.version + - gt: { snapshot.version_id: 0} + + # Close index + - do: + indices.close: + index : test_index + + # Restore index + - do: + snapshot.restore: + repository: test_restore_repository + snapshot: test_restore + wait_for_completion: true + + # Check recovery stats + - do: + indices.recovery: + index: test_index + + - match: { test_index.shards.0.type: SNAPSHOT } + - match: { test_index.shards.0.stage: DONE } + - match: { test_index.shards.0.index.files.recovered: 1} + - gt: { test_index.shards.0.index.size.recovered_in_bytes: 0} + - match: { test_index.shards.0.index.files.reused: 0} + - match: { test_index.shards.0.index.size.reused_in_bytes: 0} + + # Remove 
our snapshot + - do: + snapshot.delete: + repository: test_restore_repository + snapshot: test_restore + + # Remove our repository + - do: + snapshot.delete_repository: + repository: test_restore_repository diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 32ad37530c2..b11aa732b25 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -49,3 +49,17 @@ test { // this is needed for insecure plugins, remove if possible! systemProperty 'tests.artifact', project.name } + +thirdPartyAudit.excludes = [ + // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl + // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault + // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext + 'com.amazonaws.util.XpathUtils', + + // classes are missing + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', +] diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 7d0b72cd63c..51594c01302 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -21,7 +21,12 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; -import com.amazonaws.auth.*; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSCredentialsProviderChain; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.EnvironmentVariableCredentialsProvider; +import com.amazonaws.auth.InstanceProfileCredentialsProvider; +import com.amazonaws.auth.SystemPropertiesCredentialsProvider; import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java index 8063ba7de33..dd278a9231d 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java @@ -20,7 +20,16 @@ package org.elasticsearch.cloud.aws.blobstore; import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.PartETag; +import com.amazonaws.services.s3.model.PutObjectRequest; +import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; import com.amazonaws.util.Base64; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git 
a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java index 4f5c46a8c59..4861ccc202b 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java @@ -20,7 +20,12 @@ package org.elasticsearch.cloud.aws.blobstore; import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.CopyObjectRequest; +import com.amazonaws.services.s3.model.ObjectListing; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.S3ObjectSummary; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java index 198e1862da2..a8bf3ea2959 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java @@ -21,8 +21,14 @@ package org.elasticsearch.cloud.aws.blobstore; import com.amazonaws.AmazonClientException; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.CannedAccessControlList; +import com.amazonaws.services.s3.model.CreateBucketRequest; +import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; +import com.amazonaws.services.s3.model.ObjectListing; +import com.amazonaws.services.s3.model.S3ObjectSummary; +import com.amazonaws.services.s3.model.StorageClass; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java index a38a8ed3c51..a7305727353 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java @@ -30,7 +30,6 @@ import org.elasticsearch.repositories.s3.S3Repository; import java.security.AccessController; import java.security.PrivilegedAction; -import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 760968b0bf3..612f8a9eea7 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -85,7 +85,7 @@ public class S3Repository extends 
BlobStoreRepository { String region = repositorySettings.settings().get("region", settings.get(REPOSITORY_S3.REGION)); if (region == null) { - // Bucket setting is not set - use global region setting + // InternalBucket setting is not set - use global region setting String regionSetting = settings.get(CLOUD_AWS.REGION); if (regionSetting != null) { regionSetting = regionSetting.toLowerCase(Locale.ENGLISH); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java index cd3584d7bec..6346ffe57d4 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.ClientConfiguration; - import org.elasticsearch.test.ESTestCase; import static org.hamcrest.CoreMatchers.is; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java index 0c9e7535db0..97829f9d689 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java @@ -27,7 +27,86 @@ import com.amazonaws.regions.Region; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.S3ClientOptions; import com.amazonaws.services.s3.S3ResponseMetadata; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; +import com.amazonaws.services.s3.model.AccessControlList; +import com.amazonaws.services.s3.model.Bucket; +import com.amazonaws.services.s3.model.BucketCrossOriginConfiguration; +import com.amazonaws.services.s3.model.BucketLifecycleConfiguration; +import com.amazonaws.services.s3.model.BucketLoggingConfiguration; +import com.amazonaws.services.s3.model.BucketNotificationConfiguration; +import com.amazonaws.services.s3.model.BucketPolicy; +import com.amazonaws.services.s3.model.BucketReplicationConfiguration; +import com.amazonaws.services.s3.model.BucketTaggingConfiguration; +import com.amazonaws.services.s3.model.BucketVersioningConfiguration; +import com.amazonaws.services.s3.model.BucketWebsiteConfiguration; +import com.amazonaws.services.s3.model.CannedAccessControlList; +import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; +import com.amazonaws.services.s3.model.CompleteMultipartUploadResult; +import com.amazonaws.services.s3.model.CopyObjectRequest; +import com.amazonaws.services.s3.model.CopyObjectResult; +import com.amazonaws.services.s3.model.CopyPartRequest; +import com.amazonaws.services.s3.model.CopyPartResult; +import com.amazonaws.services.s3.model.CreateBucketRequest; +import com.amazonaws.services.s3.model.DeleteBucketCrossOriginConfigurationRequest; +import com.amazonaws.services.s3.model.DeleteBucketLifecycleConfigurationRequest; +import com.amazonaws.services.s3.model.DeleteBucketPolicyRequest; +import com.amazonaws.services.s3.model.DeleteBucketRequest; +import com.amazonaws.services.s3.model.DeleteBucketTaggingConfigurationRequest; +import com.amazonaws.services.s3.model.DeleteBucketWebsiteConfigurationRequest; +import com.amazonaws.services.s3.model.DeleteObjectRequest; +import com.amazonaws.services.s3.model.DeleteObjectsRequest; +import 
com.amazonaws.services.s3.model.DeleteObjectsResult; +import com.amazonaws.services.s3.model.DeleteVersionRequest; +import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest; +import com.amazonaws.services.s3.model.GetBucketAclRequest; +import com.amazonaws.services.s3.model.GetBucketCrossOriginConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketLifecycleConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketLocationRequest; +import com.amazonaws.services.s3.model.GetBucketLoggingConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketNotificationConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketPolicyRequest; +import com.amazonaws.services.s3.model.GetBucketReplicationConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketTaggingConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketVersioningConfigurationRequest; +import com.amazonaws.services.s3.model.GetBucketWebsiteConfigurationRequest; +import com.amazonaws.services.s3.model.GetObjectMetadataRequest; +import com.amazonaws.services.s3.model.GetObjectRequest; +import com.amazonaws.services.s3.model.GetS3AccountOwnerRequest; +import com.amazonaws.services.s3.model.HeadBucketRequest; +import com.amazonaws.services.s3.model.HeadBucketResult; +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; +import com.amazonaws.services.s3.model.InitiateMultipartUploadResult; +import com.amazonaws.services.s3.model.ListBucketsRequest; +import com.amazonaws.services.s3.model.ListMultipartUploadsRequest; +import com.amazonaws.services.s3.model.ListNextBatchOfObjectsRequest; +import com.amazonaws.services.s3.model.ListNextBatchOfVersionsRequest; +import com.amazonaws.services.s3.model.ListObjectsRequest; +import com.amazonaws.services.s3.model.ListPartsRequest; +import com.amazonaws.services.s3.model.ListVersionsRequest; +import com.amazonaws.services.s3.model.MultipartUploadListing; +import com.amazonaws.services.s3.model.ObjectListing; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.Owner; +import com.amazonaws.services.s3.model.PartListing; +import com.amazonaws.services.s3.model.PutObjectRequest; +import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.RestoreObjectRequest; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.SetBucketAclRequest; +import com.amazonaws.services.s3.model.SetBucketCrossOriginConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketLifecycleConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketLoggingConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketNotificationConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketPolicyRequest; +import com.amazonaws.services.s3.model.SetBucketReplicationConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketTaggingConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketVersioningConfigurationRequest; +import com.amazonaws.services.s3.model.SetBucketWebsiteConfigurationRequest; +import com.amazonaws.services.s3.model.SetObjectAclRequest; +import com.amazonaws.services.s3.model.StorageClass; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; +import com.amazonaws.services.s3.model.VersionListing; import org.elasticsearch.common.SuppressForbidden; import java.io.File; diff 
--git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java index d2ed3ba952f..31682ee4de6 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java @@ -22,7 +22,12 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.*; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java index 9ffa1286bc6..55f88fbfeea 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -23,7 +23,6 @@ import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; - import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java index 744be5e49de..9d9bdc1d389 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java @@ -23,7 +23,6 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockFactory; import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.store.SmbDirectoryWrapper; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.FsDirectoryService; diff --git a/plugins/store-smb/src/test/java/org/apache/lucene/store/ESBaseDirectoryTestCase.java b/plugins/store-smb/src/test/java/org/apache/lucene/store/ESBaseDirectoryTestCase.java index 4c6c230c5dc..23590b8f52f 100644 --- a/plugins/store-smb/src/test/java/org/apache/lucene/store/ESBaseDirectoryTestCase.java +++ b/plugins/store-smb/src/test/java/org/apache/lucene/store/ESBaseDirectoryTestCase.java @@ -19,14 +19,13 @@ package org.apache.lucene.store; * under the License. 
*/ +import com.carrotsearch.randomizedtesting.annotations.Listeners; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.bootstrap.BootstrapForTesting; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; -import com.carrotsearch.randomizedtesting.annotations.Listeners; -import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; - /** * Extends Lucene's BaseDirectoryTestCase with ES test behavior. */ diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 96aa6fb635d..53406f1aad9 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -34,3 +34,15 @@ dependencies { test { systemProperty 'tests.security.manager', 'false' } + +thirdPartyAudit.excludes = [ + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + + // missing class + 'com.ibm.icu.lang.UCharacter', +] diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java index 9faa604a18e..78085b201a3 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java @@ -34,8 +34,13 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; -import static org.elasticsearch.common.cli.CliTool.ExitStatus.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK; +import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK_AND_EXIT; +import static org.elasticsearch.common.cli.CliTool.ExitStatus.USAGE; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; @SuppressForbidden(reason = "modifies system properties intentionally") public class BootstrapCliParserTests extends CliToolTestCase { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java index 95d0789fbf8..8633511756d 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.cli; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; - import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.env.Environment; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java index 52486ba7d62..3789c273cf8 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java +++ 
b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java @@ -30,7 +30,9 @@ import java.util.HashMap; import java.util.Map; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; @SuppressForbidden(reason = "modifies system properties intentionally") public class EvilInternalSettingsPreparerTests extends ESTestCase { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java index d5a07606e65..0eebc9731ff 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java @@ -35,16 +35,28 @@ import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.*; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFileAttributes; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; import java.util.HashSet; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; -import static java.nio.file.attribute.PosixFilePermission.*; +import static java.nio.file.attribute.PosixFilePermission.GROUP_EXECUTE; +import static java.nio.file.attribute.PosixFilePermission.OTHERS_EXECUTE; +import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDirectoryExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; // there are some lucene file systems that seem to cause problems (deleted files, dirs instead of files) @LuceneTestCase.SuppressFileSystems("*") diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java index b2b2c0cff5c..bc92f894019 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java @@ -36,9 +36,19 @@ import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import org.elasticsearch.test.rest.client.http.HttpResponse; import org.jboss.netty.bootstrap.ServerBootstrap; -import org.jboss.netty.channel.*; +import org.jboss.netty.channel.Channel; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ChannelPipeline; +import org.jboss.netty.channel.ChannelPipelineFactory; +import 
org.jboss.netty.channel.Channels; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; -import org.jboss.netty.handler.codec.http.*; +import org.jboss.netty.handler.codec.http.DefaultHttpResponse; +import org.jboss.netty.handler.codec.http.HttpRequest; +import org.jboss.netty.handler.codec.http.HttpRequestDecoder; +import org.jboss.netty.handler.codec.http.HttpResponseEncoder; +import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.ssl.SslContext; import org.jboss.netty.handler.ssl.SslHandler; import org.jboss.netty.handler.ssl.util.InsecureTrustManagerFactory; @@ -75,8 +85,14 @@ import static org.elasticsearch.common.cli.CliToolTestCase.args; import static org.elasticsearch.common.io.FileTestUtils.assertFileContent; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDirectoryExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1; @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0.0) @@ -694,7 +710,7 @@ public class PluginManagerTests extends ESIntegTestCase { Channel channel = serverBootstrap.bind(new InetSocketAddress(InetAddress.getByName("localhost"), 0)); int port = ((InetSocketAddress) channel.getLocalAddress()).getPort(); // IO_ERROR because there is no real file delivered... 
- assertStatus(String.format(Locale.ROOT, "install https://user:pass@localhost:%s/foo.zip --verbose --timeout 1s", port), ExitStatus.IO_ERROR); + assertStatus(String.format(Locale.ROOT, "install https://user:pass@localhost:%s/foo.zip --verbose --timeout 10s", port), ExitStatus.IO_ERROR); // ensure that we did not try any other data source like download.elastic.co, in case we specified our own local URL assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("download.elastic.co")))); diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 95df2d04458..cddea9fd774 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -20,7 +20,6 @@ package org.elasticsearch.smoketest; import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.SuppressForbidden; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index 70611aed371..bc8eace704e 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -22,15 +22,16 @@ import org.elasticsearch.gradle.MavenFilteringHack apply plugin: 'elasticsearch.rest-test' ext.pluginsCount = 0 -project.rootProject.subprojects.findAll { it.path.startsWith(':projects:') }.each { subproj -> +project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each { subproj -> integTest { cluster { // need to get a non-decorated project object, so must re-lookup the project by path plugin subproj.name, project(subproj.path) } } - pluginCount += 1 + pluginsCount += 1 } +assert pluginsCount > 0 ext.expansions = [ 'expected.plugins.count': pluginsCount diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 9d3d424f62e..d0be6f13946 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -17,10 +17,10 @@ * under the License. 
*/ -import org.elasticsearch.gradle.vagrant.VagrantCommandTask -import org.elasticsearch.gradle.vagrant.BatsOverVagrantTask + import org.elasticsearch.gradle.FileContentsTask -import org.gradle.api.InvalidUserDataException +import org.elasticsearch.gradle.vagrant.BatsOverVagrantTask +import org.elasticsearch.gradle.vagrant.VagrantCommandTask String testScripts = '*.bats' String testCommand = "cd \$TESTROOT && sudo bats --tap \$BATS/$testScripts" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json new file mode 100644 index 00000000000..02acf10d1f7 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json @@ -0,0 +1,35 @@ +{ + "tasks.list": { + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks-list.html", + "methods": ["GET"], + "url": { + "path": "/_tasks", + "paths": ["/_tasks", "/_tasks/{node_id}", "/_tasks/{node_id}/{actions}"], + "parts": { + "node_id": { + "type": "list", + "description": "A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes" + }, + "actions": { + "type": "list", + "description": "A comma-separated list of actions that should be returned. Leave empty to return all." + } + }, + "params": { + "detailed": { + "type": "boolean", + "description": "Return detailed task information (default: false)" + }, + "parent_node": { + "type": "string", + "description": "Return tasks with specified parent node." + }, + "parent_task": { + "type" : "number", + "description" : "Return tasks with specified parent task id. Set to -1 to return all." + } + } + }, + "body": null + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml index 2531e6ef025..f41e14919f8 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml @@ -6,8 +6,8 @@ - match: $body: | - / #host ip heap.percent ram.percent cpu load node.role master name - ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ + / #host ip heap.percent ram.percent cpu load node.role master name + ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ - do: cat.nodes: @@ -15,8 +15,8 @@ - match: $body: | - /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \n - (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ + /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \n + (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ (-)?\d*(\.\d+)? 
\s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ - do: cat.nodes: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml new file mode 100644 index 00000000000..4162296532d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml @@ -0,0 +1,31 @@ +--- +"Test reset cluster settings": + - do: + cluster.put_settings: + body: + persistent: + cluster.routing.allocation.disk.threshold_enabled: false + flat_settings: true + + - match: {persistent: {cluster.routing.allocation.disk.threshold_enabled: "false"}} + + - do: + cluster.get_settings: + flat_settings: true + + - match: {persistent: {cluster.routing.allocation.disk.threshold_enabled: "false"}} + + - do: + cluster.put_settings: + body: + persistent: + cluster.routing.allocation.disk.threshold_enabled: null + flat_settings: true + + - match: {persistent: {}} + + - do: + cluster.get_settings: + flat_settings: true + + - match: {persistent: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yaml index e02b948f936..efdcf15cf89 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yaml @@ -35,15 +35,6 @@ test_type: properties: text1: - type: multi_field - fields: - text1: - type: string - analyzer: whitespace - text_raw: - type: string - index: not_analyzed - text2: type: string analyzer: whitespace fields: @@ -58,5 +49,3 @@ - match: {test_index.mappings.test_type.properties.text1.type: string} - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.index: not_analyzed} - - match: {test_index.mappings.test_type.properties.text2.type: string} - - match: {test_index.mappings.test_type.properties.text2.fields.text_raw.index: not_analyzed} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yaml new file mode 100644 index 00000000000..252649abbb6 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yaml @@ -0,0 +1,6 @@ +--- +"tasks_list test": + - do: + tasks.list: {} + + - is_true: nodes diff --git a/settings.gradle b/settings.gradle index e9fb0a043aa..55126b3c808 100644 --- a/settings.gradle +++ b/settings.gradle @@ -8,7 +8,9 @@ List projects = [ 'distribution:tar', 'distribution:deb', 'distribution:rpm', - 'test-framework', + 'test:framework', + 'test:fixtures:example-fixture', + 'test:fixtures:hdfs-fixture', 'modules:lang-expression', 'modules:lang-groovy', 'modules:lang-mustache', diff --git a/test/build.gradle b/test/build.gradle new file mode 100644 index 00000000000..564f8673307 --- /dev/null +++ b/test/build.gradle @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
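A note on the new tasks API added above: the `tasks.list.json` spec registers GET on /_tasks (optionally scoped by {node_id} and {actions}, with `detailed`, `parent_node` and `parent_task` parameters), and the YAML test only asserts that the response carries a `nodes` object. Below is a hedged usage sketch, not part of this change, for exercising the endpoint with nothing but the JDK; the localhost:9200 address assumes a locally running node.

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class TasksListExample {
    public static void main(String[] args) throws Exception {
        // GET /_tasks?detailed=true, per the "paths" and "params" in tasks.list.json above
        URL url = new URL("http://localhost:9200/_tasks?detailed=true");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            // the YAML test above only checks that the body contains a "nodes" object
            reader.lines().forEach(System.out::println);
        }
    }
}
```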
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.elasticsearch.gradle.precommit.PrecommitTasks + +subprojects { + // fixtures is just an intermediate parent project + if (name == 'fixtures') return + + group = 'org.elasticsearch.test' + apply plugin: 'elasticsearch.build' + + + // the main files are actually test files, so use the appropriate forbidden api sigs + forbiddenApisMain { + bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] + signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), + PrecommitTasks.getResource('/forbidden/test-signatures.txt')] + } + + // TODO: should we have licenses for our test deps? + dependencyLicenses.enabled = false + + // TODO: why is the test framework pulled in... + forbiddenApisMain.enabled = false + jarHell.enabled = false +} diff --git a/plugins/mapper-attachments/licenses/stax-api-NOTICE.txt b/test/fixtures/build.gradle similarity index 100% rename from plugins/mapper-attachments/licenses/stax-api-NOTICE.txt rename to test/fixtures/build.gradle diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java b/test/fixtures/example-fixture/build.gradle similarity index 80% rename from plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java rename to test/fixtures/example-fixture/build.gradle index 3eda2272149..17a4586a54d 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/FsCallback.java +++ b/test/fixtures/example-fixture/build.gradle @@ -17,13 +17,5 @@ * under the License. */ -package org.elasticsearch.repositories.hdfs; - -import java.io.IOException; - -import org.apache.hadoop.fs.FileSystem; - -interface FsCallback<V> { - - V doInHdfs(FileSystem fs) throws IOException; -} +apply plugin: 'elasticsearch.build' +test.enabled = false diff --git a/test/fixtures/example-fixture/src/main/java/example/ExampleTestFixture.java b/test/fixtures/example-fixture/src/main/java/example/ExampleTestFixture.java new file mode 100644 index 00000000000..603aba1fc63 --- /dev/null +++ b/test/fixtures/example-fixture/src/main/java/example/ExampleTestFixture.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package example; + +import java.lang.management.ManagementFactory; +import java.net.Inet6Address; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.nio.ByteBuffer; +import java.nio.channels.AsynchronousServerSocketChannel; +import java.nio.channels.AsynchronousSocketChannel; +import java.nio.channels.CompletionHandler; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.Collections; + +/** Crappy example test fixture that responds with TEST and closes the connection */ +public class ExampleTestFixture { + public static void main(String args[]) throws Exception { + if (args.length != 1) { + throw new IllegalArgumentException("ExampleTestFixture <logDirectory>"); + } + Path dir = Paths.get(args[0]); + AsynchronousServerSocketChannel server = AsynchronousServerSocketChannel + .open() + .bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + + // write pid file + Path tmp = Files.createTempFile(dir, null, null); + String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; + Files.write(tmp, Collections.singleton(pid)); + Files.move(tmp, dir.resolve("pid"), StandardCopyOption.ATOMIC_MOVE); + + // write port file + tmp = Files.createTempFile(dir, null, null); + InetSocketAddress bound = (InetSocketAddress) server.getLocalAddress(); + if (bound.getAddress() instanceof Inet6Address) { + Files.write(tmp, Collections.singleton("[" + bound.getHostString() + "]:" + bound.getPort())); + } else { + Files.write(tmp, Collections.singleton(bound.getHostString() + ":" + bound.getPort())); + } + Files.move(tmp, dir.resolve("ports"), StandardCopyOption.ATOMIC_MOVE); + + // go time + server.accept(null, new CompletionHandler<AsynchronousSocketChannel,Void>() { + @Override + public void completed(AsynchronousSocketChannel socket, Void attachment) { + server.accept(null, this); + try (AsynchronousSocketChannel ch = socket) { + ch.write(ByteBuffer.wrap("TEST\n".getBytes(StandardCharsets.UTF_8))).get(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public void failed(Throwable exc, Void attachment) {} + }); + + // wait forever, until you kill me + Thread.sleep(Long.MAX_VALUE); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java b/test/fixtures/hdfs-fixture/build.gradle similarity index 54% rename from core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java rename to test/fixtures/hdfs-fixture/build.gradle index 1a9d0b70b37..3d63939f66e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -16,26 +16,27 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.index.mapper; -/** - * A container for a {@link MappedFieldType} which can be updated and is reference counted.
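ExampleTestFixture above (and MiniHDFS below) follow the same handshake: each publishes `pid` and `ports` files by writing to a temp file and then renaming with ATOMIC_MOVE, so a polling build never observes a half-written file. A hedged sketch of the consuming side, not part of the PR; the fixture directory below is an assumed location, and the parsing matches the one-line host:port format the fixture writes.

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class ExampleFixtureClient {
    public static void main(String[] args) throws Exception {
        Path dir = Paths.get("build/fixtures/exampleFixture"); // assumed location
        // a single line, either "127.0.0.1:49152" or "[::1]:49152"
        String line = Files.readAllLines(dir.resolve("ports"), StandardCharsets.UTF_8).get(0).trim();
        int colon = line.lastIndexOf(':');
        String host = line.substring(0, colon).replace("[", "").replace("]", "");
        int port = Integer.parseInt(line.substring(colon + 1));
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress(host, port));
            BufferedReader in = new BufferedReader(
                    new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
            System.out.println(in.readLine()); // prints "TEST"
        }
    }
}
```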
- */ -public class MappedFieldTypeReference { - private MappedFieldType fieldType; // the current field type this reference points to +apply plugin: 'elasticsearch.build' - public MappedFieldTypeReference(MappedFieldType fieldType) { - fieldType.freeze(); // ensure frozen - this.fieldType = fieldType; - } - - public MappedFieldType get() { - return fieldType; - } - - public void set(MappedFieldType fieldType) { - fieldType.freeze(); // ensure frozen - this.fieldType = fieldType; - } +versions << [ + 'hadoop2': '2.7.1' +] +// we create MiniHdfsCluster with the hadoop artifact +dependencies { + compile "org.apache.hadoop:hadoop-minicluster:${versions.hadoop2}" } + +// for testing, until fixtures are actually debuggable. +// gradle hides *EVERYTHING* so you have no clue what went wrong. +task hdfs(type: JavaExec) { + classpath = sourceSets.test.compileClasspath + sourceSets.test.output + main = "hdfs.MiniHDFS" + args = [ 'build/fixtures/hdfsFixture' ] +} + +// just a test fixture: we aren't using jars in releases +thirdPartyAudit.enabled = false +// TODO: add a simple HDFS client test for this fixture +test.enabled = false diff --git a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java new file mode 100644 index 00000000000..a4bf47f8eae --- /dev/null +++ b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package hdfs; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdfs.DFSConfigKeys; +import org.apache.hadoop.hdfs.MiniDFSCluster; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.Locale; + +import java.lang.management.ManagementFactory; + +/** + * MiniHDFS test fixture. There is a CLI tool, but here we can + * easily properly setup logging, avoid parsing JSON, etc. 
+ */ +public class MiniHDFS { + + private static String PORT_FILE_NAME = "ports"; + private static String PID_FILE_NAME = "pid"; + + public static void main(String[] args) throws Exception { + if (args.length != 1) { + throw new IllegalArgumentException("MiniHDFS <baseDirectory>"); + } + // configure Paths + Path baseDir = Paths.get(args[0]); + // hadoop-home/, so logs will not complain + if (System.getenv("HADOOP_HOME") == null) { + Path hadoopHome = baseDir.resolve("hadoop-home"); + Files.createDirectories(hadoopHome); + System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); + } + // hdfs-data/, where any data is going + Path hdfsHome = baseDir.resolve("hdfs-data"); + + // start cluster + Configuration cfg = new Configuration(); + cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); + // lower default permission: TODO: needed? + cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); + // TODO: remove hardcoded port! + MiniDFSCluster dfs = new MiniDFSCluster.Builder(cfg).nameNodePort(9999).build(); + + // write our PID file + Path tmp = Files.createTempFile(baseDir, null, null); + String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; + Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8)); + Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); + + // write our port file + tmp = Files.createTempFile(baseDir, null, null); + Files.write(tmp, Integer.toString(dfs.getNameNodePort()).getBytes(StandardCharsets.UTF_8)); + Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); + } +} diff --git a/test-framework/build.gradle b/test/framework/build.gradle similarity index 75% rename from test-framework/build.gradle rename to test/framework/build.gradle index a423f56c922..5039036be46 100644 --- a/test-framework/build.gradle +++ b/test/framework/build.gradle @@ -16,10 +16,8 @@ * specific language governing permissions and limitations * under the License. */ -import org.elasticsearch.gradle.precommit.PrecommitTasks -apply plugin: 'elasticsearch.build' -apply plugin: 'com.bmuschko.nexus' +import org.elasticsearch.gradle.precommit.PrecommitTasks; dependencies { compile "org.elasticsearch:elasticsearch:${version}" @@ -47,3 +45,19 @@ forbiddenApisMain { // TODO: should we have licenses for our test deps? dependencyLicenses.enabled = false +thirdPartyAudit.excludes = [ + // classes are missing + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + // we intentionally exclude the ant tasks because people were depending on them from their tests!!!!!!!
+ 'org.apache.tools.ant.BuildException', + 'org.apache.tools.ant.DirectoryScanner', + 'org.apache.tools.ant.Task', + 'org.apache.tools.ant.types.FileSet', + 'org.easymock.EasyMock', + 'org.easymock.IArgumentMatcher', + 'org.jmock.core.Constraint', +] diff --git a/test-framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java similarity index 96% rename from test-framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java rename to test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 68784083797..b2ce5ebd86e 100644 --- a/test-framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -20,13 +20,8 @@ package org.elasticsearch.bootstrap; import com.carrotsearch.randomizedtesting.RandomizedRunner; - import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestSecurityManager; import org.elasticsearch.SecureSM; -import org.elasticsearch.bootstrap.Bootstrap; -import org.elasticsearch.bootstrap.ESPolicy; -import org.elasticsearch.bootstrap.Security; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; @@ -55,16 +50,16 @@ import java.util.Set; import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; -/** +/** * Initializes natives and installs test security manager * (init'd early by base classes to ensure it happens regardless of which - * test case happens to be first, test ordering, etc). + * test case happens to be first, test ordering, etc). *
* <p>
      * The idea is to mimic as much as possible what happens with ES in production * mode (e.g. assign permissions and install security manager the same way) */ public class BootstrapForTesting { - + // TODO: can we share more code with the non-test side here // without making things complex??? @@ -83,10 +78,10 @@ public class BootstrapForTesting { // initialize probes Bootstrap.initializeProbes(); - + // initialize sysprops BootstrapInfo.getSystemProperties(); - + // check for jar hell try { JarHell.checkJarHell(); @@ -130,7 +125,7 @@ public class BootstrapForTesting { if (System.getProperty("tests.maven") == null) { perms.add(new RuntimePermission("setIO")); } - + // add bind permissions for testing // ephemeral ports (note, on java 7 before update 51, this is a different permission) // this should really be the only one allowed for tests, otherwise they have race conditions @@ -138,7 +133,7 @@ public class BootstrapForTesting { // ... but tests are messy. like file permissions, just let them live in a fantasy for now. // TODO: cut over all tests to bind to ephemeral ports perms.add(new SocketPermission("localhost:1024-", "listen,resolve")); - + // read test-framework permissions final Policy testFramework = Security.readPolicy(Bootstrap.class.getResource("test-framework.policy"), JarHell.parseClassPath()); final Policy esPolicy = new ESPolicy(perms, getPluginPermissions(), true); @@ -172,7 +167,7 @@ public class BootstrapForTesting { } } - /** + /** * we dont know which codesources belong to which plugin, so just remove the permission from key codebases * like core, test-framework, etc. this way tests fail if accesscontroller blocks are missing. */ @@ -182,7 +177,7 @@ public class BootstrapForTesting { if (pluginPolicies.isEmpty()) { return Collections.emptyMap(); } - + // compute classpath minus obvious places, all other jars will get the permission. 
Set<URL> codebases = new HashSet<>(Arrays.asList(parseClassPathWithSymlinks())); Set<URL> excluded = new HashSet<>(Arrays.asList( @@ -198,13 +193,13 @@ Assert.class.getProtectionDomain().getCodeSource().getLocation() )); codebases.removeAll(excluded); - + // parse each policy file, with codebase substitution from the classpath final List<Policy> policies = new ArrayList<>(); for (URL policyFile : pluginPolicies) { policies.add(Security.readPolicy(policyFile, codebases.toArray(new URL[codebases.size()]))); } - + // consult each policy file for those codebases Map<String,Policy> map = new HashMap<>(); for (URL url : codebases) { diff --git a/test-framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java b/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java rename to test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java diff --git a/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java similarity index 92% rename from test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java rename to test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index 6ac2101fe52..3e9b0c09cb2 100644 --- a/test-framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -29,10 +29,11 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.monitor.fs.FsInfo; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.threadpool.ThreadPool; @@ -77,11 +78,11 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { } @Inject - public MockInternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService, + public MockInternalClusterInfoService(Settings settings, ClusterSettings clusterSettings, TransportNodesStatsAction transportNodesStatsAction, TransportIndicesStatsAction transportIndicesStatsAction, ClusterService clusterService, ThreadPool threadPool) { - super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); + super(settings, clusterSettings, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); this.clusterName = ClusterName.clusterNameFromSettings(settings); stats[0] = makeStats("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100)); stats[1] = makeStats("node_t2", new DiskUsage("node_t2", "n2", "/dev/null", 100, 100)); @@ -133,4 +134,9 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { return "/dev/null"; } } + + @Override + public void setUpdateFrequency(TimeValue updateFrequency) { +
super.setUpdateFrequency(updateFrequency); + } } diff --git a/test-framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java rename to test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java diff --git a/test-framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java rename to test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/common/io/FileTestUtils.java b/test/framework/src/main/java/org/elasticsearch/common/io/FileTestUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/common/io/FileTestUtils.java rename to test/framework/src/main/java/org/elasticsearch/common/io/FileTestUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java b/test/framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java rename to test/framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java diff --git a/test-framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java rename to test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java diff --git a/test-framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java rename to test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java rename to test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java diff --git a/test-framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/node/MockNode.java rename to test/framework/src/main/java/org/elasticsearch/node/MockNode.java diff --git a/test-framework/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java b/test/framework/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java rename to test/framework/src/main/java/org/elasticsearch/node/NodeMocksPlugin.java diff --git a/test-framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java b/test/framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java similarity 
index 100% rename from test-framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java rename to test/framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java diff --git a/test-framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java b/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java rename to test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java diff --git a/test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java similarity index 97% rename from test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java rename to test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index bfd40900456..0096fcf9b5f 100644 --- a/test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -22,7 +22,6 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.lookup.SearchLookup; @@ -72,7 +71,7 @@ public class MockScriptEngine implements ScriptEngineService { } @Override - public Object compile(String script) { + public Object compile(String script, Map<String, Object> params) { return script; } diff --git a/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java similarity index 82% rename from test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java rename to test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java index 9a7a3efa3dc..98b5181636d 100644 --- a/test-framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -22,12 +22,12 @@ package org.elasticsearch.search; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.dfs.DfsPhase; @@ -67,13 +67,13 @@ public class MockSearchService extends SearchService { } @Inject - public MockSearchService(Settings settings, NodeSettingsService nodeSettingsService, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, - ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, - DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase, IndicesRequestCache indicesQueryCache) { - super(settings, nodeSettingsService, clusterService, indicesService, indicesWarmer, threadPool,
scriptService, pageCacheRecycler, bigArrays, dfsPhase, + public MockSearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, + ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, + DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase, IndicesRequestCache indicesQueryCache) { + super(settings, clusterSettings, clusterService, indicesService, indicesWarmer, threadPool, scriptService, pageCacheRecycler, bigArrays, dfsPhase, queryPhase, fetchPhase, indicesQueryCache); } - + @Override protected void putContext(SearchContext context) { super.putContext(context); diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/script/TestScript.java diff --git a/test-framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java rename to test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java rename to 
test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java similarity index 98% rename from test-framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java rename to test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java index caa414e071c..2148d0a71c5 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CompositeTestCluster.java @@ -30,7 +30,12 @@ import org.elasticsearch.common.transport.TransportAddress; import java.io.IOException; import java.net.InetSocketAddress; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.Random; import java.util.stream.Collectors; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; diff --git a/test-framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java similarity index 95% rename from test-framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index bf9ccc957bc..a630f24214d 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -20,7 +20,11 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.store.*; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; diff --git a/test-framework/src/main/java/org/elasticsearch/test/DummyShardLock.java b/test/framework/src/main/java/org/elasticsearch/test/DummyShardLock.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/DummyShardLock.java rename to test/framework/src/main/java/org/elasticsearch/test/DummyShardLock.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java similarity index 89% rename from test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index e82823ae997..5dc824f687d 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ 
-37,6 +38,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -44,11 +46,14 @@ import org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.ReplicaShardAllocator; import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; -import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import java.lang.reflect.Constructor; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Random; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; @@ -68,37 +73,37 @@ public abstract class ESAllocationTestCase extends ESTestCase { } public static MockAllocationService createAllocationService(Settings settings, Random random) { - return createAllocationService(settings, new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), random); + return createAllocationService(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random); } - public static MockAllocationService createAllocationService(Settings settings, NodeSettingsService nodeSettingsService, Random random) { + public static MockAllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings, Random random) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, nodeSettingsService, random), + randomAllocationDeciders(settings, clusterSettings, random), new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE); } public static MockAllocationService createAllocationService(Settings settings, ClusterInfoService clusterInfoService) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), getRandom()), + randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), clusterInfoService); } public static MockAllocationService createAllocationService(Settings settings, GatewayAllocator allocator) { return new MockAllocationService(settings, - randomAllocationDeciders(settings, new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), getRandom()), + randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), new ShardsAllocators(settings, allocator), EmptyClusterInfoService.INSTANCE); } - public static AllocationDeciders randomAllocationDeciders(Settings settings, NodeSettingsService nodeSettingsService, Random random) { + public static AllocationDeciders randomAllocationDeciders(Settings settings, ClusterSettings clusterSettings, Random random) { final List> defaultAllocationDeciders = 
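
Editor's note: the hunks above are part of a broader migration in this patch: NodeSettingsService is replaced by ClusterSettings, constructed from a settings instance plus the registry of known settings (ClusterSettings.BUILT_IN_CLUSTER_SETTINGS). A simplified sketch of what such a registry buys, namely that keys are validated up front and dynamic updates are pushed to registered consumers. All types here are stand-ins, not the real org.elasticsearch.common.settings classes.

---------------------------------------------------------------------------
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;

/**
 * Simplified sketch of a cluster-settings registry: it knows the full set of
 * valid keys up front (compare BUILT_IN_CLUSTER_SETTINGS) and pushes dynamic
 * updates to registered consumers.
 */
class SettingsRegistry {
    private final Set<String> knownKeys;
    private final Map<String, Consumer<String>> updateConsumers = new HashMap<>();

    SettingsRegistry(Set<String> knownKeys) {
        this.knownKeys = knownKeys;
    }

    /** Register interest in dynamic updates for one key; the key must be known. */
    void addSettingsUpdateConsumer(String key, Consumer<String> consumer) {
        if (knownKeys.contains(key) == false) {
            throw new IllegalArgumentException("unknown setting [" + key + "]");
        }
        updateConsumers.put(key, consumer);
    }

    /** Apply an update, rejecting keys that were never declared. */
    void applySetting(String key, String value) {
        if (knownKeys.contains(key) == false) {
            throw new IllegalArgumentException("unknown setting [" + key + "]");
        }
        Consumer<String> consumer = updateConsumers.get(key);
        if (consumer != null) {
            consumer.accept(value);
        }
    }
}
---------------------------------------------------------------------------
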
ClusterModule.DEFAULT_ALLOCATION_DECIDERS; final List list = new ArrayList<>(); for (Class deciderClass : ClusterModule.DEFAULT_ALLOCATION_DECIDERS) { try { try { - Constructor constructor = deciderClass.getConstructor(Settings.class, NodeSettingsService.class); - list.add(constructor.newInstance(settings, nodeSettingsService)); + Constructor constructor = deciderClass.getConstructor(Settings.class, ClusterSettings.class); + list.add(constructor.newInstance(settings, clusterSettings)); } catch (NoSuchMethodException e) { Constructor constructor = null; constructor = deciderClass.getConstructor(Settings.class); @@ -230,7 +235,8 @@ public abstract class ESAllocationTestCase extends ESTestCase { boolean changed = false; while (unassignedIterator.hasNext()) { ShardRouting shard = unassignedIterator.next(); - if (shard.primary() || shard.allocatedPostIndexCreate() == false) { + IndexMetaData indexMetaData = allocation.metaData().index(shard.getIndex()); + if (shard.primary() || shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } changed |= replicaShardAllocator.ignoreUnassignedIfDelayed(unassignedIterator, shard); diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java similarity index 97% rename from test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 3e5c903a1ba..49644196da4 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -26,14 +26,12 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.junit.listeners.LoggingListener; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportModule; import java.io.IOException; import java.lang.annotation.ElementType; @@ -46,7 +44,6 @@ import java.nio.file.Path; import java.util.Collection; import java.util.Collections; import java.util.Map; -import java.util.Random; import static org.hamcrest.Matchers.is; @@ -181,6 +178,11 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { return externalNodeSettings(nodeOrdinal); } + @Override + public Collection> nodePlugins() { + return Collections.emptyList(); + } + @Override public Settings transportClientSettings() { return transportClientSettings(); @@ -238,7 +240,7 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { protected Settings commonNodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(requiredSettings()); - builder.put(TransportModule.TRANSPORT_TYPE_KEY, "netty"); // run same transport / disco as external + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty"); // run same transport / disco as external builder.put("node.mode", "network"); return builder.build(); } diff --git 
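
Editor's note: randomAllocationDeciders above instantiates each decider class reflectively, preferring a (Settings, ClusterSettings) constructor and falling back to the single-argument (Settings) one when a decider declares no dynamic settings. Here is the same fallback pattern in isolation, with the signatures generalized so the sketch stays self-contained.

---------------------------------------------------------------------------
import java.lang.reflect.Constructor;

class ReflectiveFactory {
    /**
     * Mirrors the instantiation strategy in randomAllocationDeciders: try the
     * richer constructor signature first and fall back to the simpler one
     * when the class does not declare it.
     */
    static <T> T newInstance(Class<T> type,
                             Class<?>[] preferredSig, Object[] preferredArgs,
                             Class<?>[] fallbackSig, Object[] fallbackArgs) throws Exception {
        try {
            Constructor<T> preferred = type.getConstructor(preferredSig);
            return preferred.newInstance(preferredArgs);
        } catch (NoSuchMethodException e) {
            Constructor<T> fallback = type.getConstructor(fallbackSig);
            return fallback.newInstance(fallbackArgs);
        }
    }
}
---------------------------------------------------------------------------
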
a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java similarity index 97% rename from test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 37a8fd388b8..4463f8066ee 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -20,15 +20,12 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.carrotsearch.randomizedtesting.Randomness; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.http.impl.client.HttpClients; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.SuppressForbidden; import org.apache.lucene.util.TestUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; @@ -36,7 +33,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -44,8 +40,8 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; @@ -63,6 +59,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -97,6 +94,8 @@ import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.MockEngineFactoryPlugin; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ 
-106,20 +105,22 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.index.translog.TranslogConfig; -import org.elasticsearch.index.translog.TranslogWriter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.node.Node; +import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.client.RandomizingClient; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; +import org.elasticsearch.test.store.MockFSIndexStore; +import org.elasticsearch.test.transport.AssertingLocalTransport; +import org.elasticsearch.test.transport.MockTransportService; import org.hamcrest.Matchers; import org.joda.time.DateTimeZone; import org.junit.After; @@ -137,9 +138,7 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.net.InetAddress; import java.net.InetSocketAddress; -import java.net.MalformedURLException; import java.net.URL; -import java.net.UnknownHostException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; @@ -162,6 +161,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BooleanSupplier; +import static org.elasticsearch.client.Requests.syncedFlushRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -510,25 +510,21 @@ public abstract class ESIntegTestCase extends ESTestCase { } private static Settings.Builder setRandomIndexTranslogSettings(Random random, Settings.Builder builder) { - if (random.nextBoolean()) { - builder.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, RandomInts.randomIntBetween(random, 1, 10000)); - } if (random.nextBoolean()) { builder.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB)); } if (random.nextBoolean()) { - builder.put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, random.nextBoolean()); + builder.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush } if (random.nextBoolean()) { - builder.put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durabilty.values())); + builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durability.values())); } if (random.nextBoolean()) { - builder.put(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, RandomPicks.randomFrom(random, TranslogWriter.Type.values())); if (rarely(random)) { - builder.put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL, 0); // 0 has special meaning to sync each op + builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, 0); // 0 has special meaning to sync 
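
Editor's note: worth flagging in the translog hunk above: the dedicated "disable flush" flag is gone, and flushing is instead turned off by setting the flush threshold to one petabyte. A sketch of the same randomization shape in plain Java follows; the string keys are the assumed forms of the constants in the diff, so treat them as assumptions.

---------------------------------------------------------------------------
import java.util.HashMap;
import java.util.Map;
import java.util.Random;

/**
 * Sketch of the randomization above: every knob is only sometimes set, and
 * "never flush" is expressed as an absurdly large threshold rather than a
 * dedicated boolean flag.
 */
class TranslogSettingsRandomizer {
    static Map<String, String> randomize(Random random) {
        Map<String, String> settings = new HashMap<>();
        if (random.nextBoolean()) {
            settings.put("index.translog.flush_threshold_size", (1 + random.nextInt(300)) + "mb");
        }
        if (random.nextBoolean()) {
            // overwrites the key above on purpose, mirroring the original: just don't flush
            settings.put("index.translog.flush_threshold_size", "1pb");
        }
        if (random.nextBoolean()) {
            settings.put("index.translog.durability", random.nextBoolean() ? "REQUEST" : "ASYNC");
        }
        if (random.nextBoolean()) {
            // 0 has special meaning: sync after each operation
            int millis = random.nextInt(10) == 0 ? 0 : 100 + random.nextInt(4900);
            settings.put("index.translog.sync_interval", millis + "ms");
        }
        return settings;
    }
}
---------------------------------------------------------------------------
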
each op } else { - builder.put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL, RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); + builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); } } @@ -1042,7 +1038,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ public void setMinimumMasterNodes(int n) { assertTrue(client().admin().cluster().prepareUpdateSettings().setTransientSettings( - settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, n)) + settingsBuilder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), n)) .get().isAcknowledged()); } @@ -1237,10 +1233,10 @@ public abstract class ESIntegTestCase extends ESTestCase { * * @see #waitForRelocation() */ - protected final RefreshResponse refresh() { + protected final RefreshResponse refresh(String... indices) { waitForRelocation(); // TODO RANDOMIZE with flush? - RefreshResponse actionGet = client().admin().indices().prepareRefresh().execute().actionGet(); + RefreshResponse actionGet = client().admin().indices().prepareRefresh(indices).execute().actionGet(); assertNoFailures(actionGet); return actionGet; } @@ -1250,7 +1246,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ protected final void flushAndRefresh(String... indices) { flush(indices); - refresh(); + refresh(indices); } /** @@ -1451,18 +1447,6 @@ public abstract class ESIntegTestCase extends ESTestCase { private AtomicInteger dummmyDocIdGenerator = new AtomicInteger(); - /** Disables translog flushing for the specified index */ - public static void disableTranslogFlush(String index) { - Settings settings = Settings.builder().put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true).build(); - client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); - } - - /** Enables translog flushing for the specified index */ - public static void enableTranslogFlush(String index) { - Settings settings = Settings.builder().put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, false).build(); - client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); - } - /** Disables an index block for the specified index */ public static void disableIndexBlock(String index, String block) { Settings settings = Settings.builder().put(block, false).build(); @@ -1477,7 +1461,7 @@ public abstract class ESIntegTestCase extends ESTestCase { /** Sets or unsets the cluster read_only mode **/ public static void setClusterReadOnly(boolean value) { - Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY, value).build(); + Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), value).build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get()); } @@ -1499,13 +1483,13 @@ public abstract class ESIntegTestCase extends ESTestCase { if (randomBoolean()) { client().admin().indices().prepareFlush(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute( new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); - } else if (isInternalCluster()) { - internalCluster().getInstance(SyncedFlushService.class).attemptSyncedFlush(indices, IndicesOptions.lenientExpandOpen(), - new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); + } else { + client().admin().indices().syncedFlush(syncedFlushRequest(indices).indicesOptions(IndicesOptions.lenientExpandOpen()), + new 
LatchedActionListener<>(newLatch(inFlightAsyncOperations))); } } else if (rarely()) { client().admin().indices().prepareForceMerge(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).setMaxNumSegments(between(1, 10)).setFlush(maybeFlush && randomBoolean()).execute( - new LatchedActionListener(newLatch(inFlightAsyncOperations))); + new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); } } while (inFlightAsyncOperations.size() > MAX_IN_FLIGHT_ASYNC_INDEXES) { @@ -1690,8 +1674,8 @@ public abstract class ESIntegTestCase extends ESTestCase { Settings.Builder builder = settingsBuilder() // Default the watermarks to absurdly low to prevent the tests // from failing on nodes without enough disk space - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "1b") - .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "1b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b") .put("script.indexed", "on") .put("script.inline", "on") // wait short time for other active shards before actually deleting, default 30s not needed in tests @@ -1809,14 +1793,21 @@ public abstract class ESIntegTestCase extends ESTestCase { nodeMode = "local"; } - boolean enableMockModules = enableMockModules(); + Collection<Class<? extends Plugin>> mockPlugins = getMockPlugins(); + return new InternalTestCluster(nodeMode, seed, createTempDir(), minNumDataNodes, maxNumDataNodes, InternalTestCluster.clusterName(scope.name(), seed) + "-cluster", nodeConfigurationSource, getNumClientNodes(), - InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, enableMockModules); + InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, mockPlugins); } - protected boolean enableMockModules() { - return RandomizedTest.systemPropertyAsBoolean(TESTS_ENABLE_MOCK_MODULES, true);
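
Editor's note: a pattern repeated across this patch, including the watermark and read-only call sites above: bare string constants become typed setting objects, and builders call SETTING.getKey() instead of repeating the string. A stand-in sketch of why that helps, namely that key, default, and parser live in one object. This is not the real org.elasticsearch.common.settings.Setting API.

---------------------------------------------------------------------------
import java.util.Map;
import java.util.function.Function;

/**
 * Stand-in for a typed setting: the key, the default and the parser live in
 * one object, so call sites write READ_ONLY.getKey() instead of scattering
 * the raw string constant.
 */
final class TypedSetting<T> {
    private final String key;
    private final T defaultValue;
    private final Function<String, T> parser;

    TypedSetting(String key, T defaultValue, Function<String, T> parser) {
        this.key = key;
        this.defaultValue = defaultValue;
        this.parser = parser;
    }

    String getKey() {
        return key;
    }

    /** Parse the value out of a flat settings map, falling back to the default. */
    T get(Map<String, String> settings) {
        String raw = settings.get(key);
        return raw == null ? defaultValue : parser.apply(raw);
    }

    // the read-only block setting uses this key in the real code base
    static final TypedSetting<Boolean> READ_ONLY =
            new TypedSetting<>("cluster.blocks.read_only", false, Boolean::parseBoolean);
}
---------------------------------------------------------------------------
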
+ /** Return the mock plugins the cluster should use. These may be randomly omitted based on the cluster seed. */ + protected Collection<Class<? extends Plugin>> getMockPlugins() { + return pluginList(MockTransportService.TestPlugin.class, + MockFSIndexStore.TestPlugin.class, + NodeMocksPlugin.class, + MockEngineFactoryPlugin.class, + MockSearchService.TestPlugin.class, + AssertingLocalTransport.TestPlugin.class); } /** diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java similarity index 80% rename from test-framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 287bd121c90..9b06bae21b0 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.test; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -37,15 +38,22 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.IndexService; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.AfterClass; +import org.junit.Before; import org.junit.BeforeClass; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -59,13 +67,13 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { private static Node NODE = null; - private static void reset() { + private void reset() { assert NODE != null; stopNode(); startNode(); } - private static void startNode() { + private void startNode() { assert NODE == null; NODE = newNode(); // we must wait for the node to actually be up and running. otherwise the node might have started, elected itself master but might not yet have removed the @@ -80,7 +88,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { Releasables.close(node); } - static void cleanup(boolean resetNode) { + private void cleanup(boolean resetNode) { assertAcked(client().admin().indices().prepareDelete("*").get()); if (resetNode) { reset(); @@ -92,7 +100,19 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { metaData.transientSettings().getAsMap().size(), equalTo(0)); } + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + // Create the node lazily, on the first test. This is ok because we do not randomize any settings, + // only the cluster name. This allows us to have overridden properties for plugins and the version to use. + if (NODE == null) { + startNode(); + } + }
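
Editor's note: setUp() above replaces eager @BeforeClass node creation with lazy creation on the first test, so subclass overrides are in effect before the node exists. The same lifecycle pattern in isolation; ExpensiveResource is a stand-in for the node.

---------------------------------------------------------------------------
import org.junit.AfterClass;
import org.junit.Before;

/**
 * The lazy-start lifecycle from setUp() above, in isolation: the shared
 * resource is built by the first test of a class rather than in @BeforeClass,
 * so per-class customizations are visible before it exists.
 */
public abstract class LazyResourceTestCase {

    static class ExpensiveResource implements AutoCloseable {
        @Override
        public void close() { /* release whatever the resource holds */ }
    }

    private static ExpensiveResource RESOURCE = null;

    /** Subclasses customize construction; called at most once per class run. */
    protected abstract ExpensiveResource newResource();

    @Before
    public void setUpResource() {
        if (RESOURCE == null) {
            RESOURCE = newResource(); // the first test triggers creation
        }
    }

    @AfterClass
    public static void tearDownResource() throws Exception {
        if (RESOURCE != null) {
            RESOURCE.close();
            RESOURCE = null;
        }
    }
}
---------------------------------------------------------------------------
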
+ @After + @Override public void tearDown() throws Exception { logger.info("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName()); super.tearDown(); @@ -102,7 +122,6 @@ @BeforeClass public static void setUpClass() throws Exception { stopNode(); - startNode(); } @AfterClass @@ -119,25 +138,42 @@ return false; } - private static Node newNode() { - Node build = new Node(Settings.builder() - .put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong())) - .put("path.home", createTempDir()) - // TODO: use a consistent data path for custom paths - // This needs to tie into the ESIntegTestCase#indexSettings() method - .put("path.shared_data", createTempDir().getParent()) - .put("node.name", nodeName()) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put("script.inline", "on") - .put("script.indexed", "on") - .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created - .put("http.enabled", false) - .put("node.local", true) - .put("node.data", true) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) - .build() - ); + /** The version of elasticsearch the node should act like. */ + protected Version getVersion() { + return Version.CURRENT; + } + + /** The plugin classes that should be added to the node. */ + protected Collection<Class<? extends Plugin>> getPlugins() { + return Collections.emptyList(); + } + + /** Helper method to create list of plugins without specifying generic types. */ + @SafeVarargs + @SuppressWarnings("varargs") // due to type erasure, the varargs type is non-reifiable, which causes this warning + protected final Collection<Class<? extends Plugin>> pluginList(Class<? extends Plugin>... 
plugins) { + return Arrays.asList(plugins); + } + + private Node newNode() { + Settings settings = Settings.builder() + .put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong())) + .put("path.home", createTempDir()) + // TODO: use a consistent data path for custom paths + // This needs to tie into the ESIntegTestCase#indexSettings() method + .put("path.shared_data", createTempDir().getParent()) + .put("node.name", nodeName()) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put("script.inline", "on") + .put("script.indexed", "on") + .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created + .put("http.enabled", false) + .put("node.local", true) + .put("node.data", true) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) + .build(); + Node build = new MockNode(settings, getVersion(), getPlugins()); build.start(); assertThat(DiscoveryNode.localNode(build.settings()), is(true)); return build; diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java similarity index 99% rename from test-framework/src/main/java/org/elasticsearch/test/ESTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index e1443110c0d..3777653297e 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -29,14 +29,12 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; - import org.apache.lucene.uninverting.UninvertingReader; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.TestRuleMarkFailure; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TimeUnits; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.bootstrap.BootstrapForTesting; import org.elasticsearch.cache.recycler.MockPageCacheRecycler; @@ -50,7 +48,6 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -69,7 +66,11 @@ import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Random; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java similarity index 99% rename from test-framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java rename to 
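
Editor's note: the newNode() rewrite above turns ESSingleNodeTestCase into an extension point: getVersion() and getPlugins() feed straight into the MockNode constructor, and pluginList(...) exists purely to absorb the unchecked-generic-array warning a Class<? extends Plugin> varargs call would otherwise raise (hence @SafeVarargs plus @SuppressWarnings("varargs")). A hedged sketch of a subclass using those hooks; MyPlugin is hypothetical.

---------------------------------------------------------------------------
import java.util.Collection;

import org.elasticsearch.Version;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;

public class MyPluginSingleNodeTests extends ESSingleNodeTestCase {

    /** Hypothetical plugin under test; name() and description() are the 2.x Plugin hooks. */
    public static class MyPlugin extends Plugin {
        @Override
        public String name() {
            return "my-plugin";
        }

        @Override
        public String description() {
            return "plugin used by this test";
        }
    }

    @Override
    protected Version getVersion() {
        return Version.CURRENT; // or an older released version to emulate
    }

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(MyPlugin.class); // picked up before the lazy node starts
    }

    public void testNodeStartsWithPlugin() {
        assertNotNull(client()); // client() reaches the node built with MyPlugin installed
    }
}
---------------------------------------------------------------------------
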
test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java index ed54ae60fbd..1af9fa5ba7e 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java @@ -21,7 +21,6 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.annotations.Listeners; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; - import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TimeUnits; @@ -46,7 +45,7 @@ public abstract class ESTokenStreamTestCase extends BaseTokenStreamTestCase { static { BootstrapForTesting.ensureInitialized(); } - + public static Version randomVersion() { return VersionUtils.randomVersion(random()); } diff --git a/test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java similarity index 98% rename from test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java rename to test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 6ab39a5b139..05f194fc26a 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -28,11 +28,11 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.node.internal.InternalSettingsPreparer; -import org.elasticsearch.transport.TransportModule; import java.io.Closeable; import java.io.IOException; @@ -111,9 +111,9 @@ final class ExternalNode implements Closeable { case "path.home": case "node.mode": case "node.local": - case TransportModule.TRANSPORT_TYPE_KEY: + case NetworkModule.TRANSPORT_TYPE_KEY: case DiscoveryModule.DISCOVERY_TYPE_KEY: - case TransportModule.TRANSPORT_SERVICE_TYPE_KEY: + case NetworkModule.TRANSPORT_SERVICE_TYPE_KEY: case InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING: continue; default: diff --git a/test-framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java rename to test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java b/test/framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java rename to test/framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java rename to test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java diff --git 
a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java similarity index 96% rename from test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java rename to test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 7ae3226b66a..ea2796aad84 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -65,7 +65,6 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; @@ -78,15 +77,12 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; -import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; -import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.Transport; @@ -98,7 +94,19 @@ import java.io.Closeable; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.NavigableMap; +import java.util.Random; +import java.util.Set; +import java.util.TreeMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; @@ -110,11 +118,14 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import static junit.framework.Assert.fail; -import static org.apache.lucene.util.LuceneTestCase.*; +import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY; +import static org.apache.lucene.util.LuceneTestCase.rarely; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESTestCase.assertBusy; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.junit.Assert.assertThat; /** @@ -199,7 +210,7 @@ public final class InternalTestCluster extends TestCluster { private final ExecutorService executor; - private final boolean enableMockModules; + private final Collection> mockPlugins; /** * All nodes started by the cluster will have their name set to nodePrefix followed by a 
positive number @@ -212,7 +223,7 @@ public final class InternalTestCluster extends TestCluster { public InternalTestCluster(String nodeMode, long clusterSeed, Path baseDir, int minNumDataNodes, int maxNumDataNodes, String clusterName, NodeConfigurationSource nodeConfigurationSource, int numClientNodes, - boolean enableHttpPipelining, String nodePrefix, boolean enableMockModules) { + boolean enableHttpPipelining, String nodePrefix, Collection> mockPlugins) { super(clusterSeed); if ("network".equals(nodeMode) == false && "local".equals(nodeMode) == false) { throw new IllegalArgumentException("Unknown nodeMode: " + nodeMode); @@ -248,7 +259,7 @@ public final class InternalTestCluster extends TestCluster { this.nodePrefix = nodePrefix; assert nodePrefix != null; - this.enableMockModules = enableMockModules; + this.mockPlugins = mockPlugins; /* * TODO @@ -292,19 +303,17 @@ public final class InternalTestCluster extends TestCluster { } // Default the watermarks to absurdly low to prevent the tests // from failing on nodes without enough disk space - builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "1b"); - builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "1b"); + builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b"); + builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b"); if (TEST_NIGHTLY) { - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, RandomInts.randomIntBetween(random, 10, 15)); - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, RandomInts.randomIntBetween(random, 10, 15)); - builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, RandomInts.randomIntBetween(random, 5, 10)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 5, 10)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 5, 10)); } else if (random.nextInt(100) <= 90) { - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, RandomInts.randomIntBetween(random, 3, 6)); - builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, RandomInts.randomIntBetween(random, 3, 6)); - builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, RandomInts.randomIntBetween(random, 2, 5)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 2, 5)); + builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), RandomInts.randomIntBetween(random, 2, 5)); } // always reduce this - it can make tests really slow - builder.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 20, 50))); + builder.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 20, 50))); defaultSettings = builder.build(); executor = EsExecutors.newCached("test runner", 0, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test_" + clusterName)); } @@ -359,16 +368,10 @@ public final class InternalTestCluster extends TestCluster { private 
Collection> getPlugins(long seed) { Set> plugins = new HashSet<>(nodeConfigurationSource.nodePlugins()); - Random random = new Random(seed); - if (enableMockModules && usually(random)) { - plugins.add(MockTransportService.TestPlugin.class); - plugins.add(MockFSIndexStore.TestPlugin.class); - plugins.add(NodeMocksPlugin.class); - plugins.add(MockEngineFactoryPlugin.class); - plugins.add(MockSearchService.TestPlugin.class); - if (isLocalTransportConfigured()) { - plugins.add(AssertingLocalTransport.TestPlugin.class); - } + plugins.addAll(mockPlugins); + if (isLocalTransportConfigured() == false) { + // this is crazy we must do this here...we should really just always be using local transport... + plugins.remove(AssertingLocalTransport.TestPlugin.class); } return plugins; } @@ -378,7 +381,7 @@ public final class InternalTestCluster extends TestCluster { Builder builder = Settings.settingsBuilder() .put(SETTING_CLUSTER_NODE_SEED, seed); if (isLocalTransportConfigured() == false) { - builder.put(Transport.TransportSettings.TRANSPORT_TCP_COMPRESS, rarely(random)); + builder.put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), rarely(random)); } if (random.nextBoolean()) { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, PageCacheRecycler.Type.values())); @@ -412,12 +415,12 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextBoolean()) { - builder.put(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT, new TimeValue(RandomInts.randomIntBetween(random, 10, 30), TimeUnit.SECONDS)); + builder.put(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING.getKey(), new TimeValue(RandomInts.randomIntBetween(random, 10, 30), TimeUnit.SECONDS)); } if (random.nextInt(10) == 0) { - builder.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, "noop"); - builder.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, "noop"); + builder.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop"); + builder.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop"); } if (random.nextBoolean()) { @@ -430,20 +433,20 @@ public final class InternalTestCluster extends TestCluster { if (random.nextBoolean()) { if (random.nextInt(10) == 0) { // do something crazy slow here - builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); + builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); } else { - builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 10, 200), ByteSizeUnit.MB)); + builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 10, 200), ByteSizeUnit.MB)); } } if (random.nextBoolean()) { - builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE, RandomPicks.randomFrom(random, StoreRateLimiting.Type.values())); + builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING.getKey(), RandomPicks.randomFrom(random, StoreRateLimiting.Type.values())); } if (random.nextBoolean()) { if (random.nextInt(10) == 0) { // do something crazy slow here - builder.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); + 
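
Editor's note: with the constructor change above, callers hand InternalTestCluster the exact mock plugins to install instead of a boolean enableMockModules switch, and getPlugins(seed) simply merges that collection into the node plugins. A usage sketch based on the signature shown; every literal argument below is a placeholder.

---------------------------------------------------------------------------
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.test.transport.MockTransportService;

class ClusterBuilderExample {
    /** Builds a cluster that runs exactly one mock plugin. */
    static InternalTestCluster buildCluster(long seed, Path baseDir, NodeConfigurationSource source) {
        Collection<Class<? extends Plugin>> mocks = Arrays.asList(MockTransportService.TestPlugin.class);
        return new InternalTestCluster("local", seed, baseDir,
                1, 3,                  // min/max data nodes
                "example-cluster", source,
                0,                     // client nodes
                InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING,
                "node_", mocks);
    }
}
---------------------------------------------------------------------------
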
builder.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); } else { - builder.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 10, 200), ByteSizeUnit.MB)); + builder.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 10, 200), ByteSizeUnit.MB)); } } diff --git a/test-framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java rename to test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java b/test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java similarity index 67% rename from test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java rename to test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java index e04e840e525..5dfb845c192 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java +++ b/test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java @@ -19,10 +19,18 @@ package org.elasticsearch.test; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.MockEngineFactoryPlugin; +import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.MockSearchService; +import org.elasticsearch.test.store.MockFSIndexStore; +import org.elasticsearch.test.transport.AssertingLocalTransport; +import org.elasticsearch.test.transport.MockTransportService; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.List; public abstract class NodeConfigurationSource { @@ -43,6 +51,18 @@ public abstract class NodeConfigurationSource { */ public abstract Settings nodeSettings(int nodeOrdinal); + /** Plugins that will be randomly added to the node */ + public Collection> mockPlugins() { + List> plugins = new ArrayList<>(); + plugins.add(MockTransportService.TestPlugin.class); + plugins.add(MockFSIndexStore.TestPlugin.class); + plugins.add(NodeMocksPlugin.class); + plugins.add(MockEngineFactoryPlugin.class); + plugins.add(MockSearchService.TestPlugin.class); + plugins.add(AssertingLocalTransport.TestPlugin.class); + return plugins; + } + /** Returns plugins that should be loaded on the node */ public Collection> nodePlugins() { return Collections.emptyList(); diff --git a/test-framework/src/main/java/org/elasticsearch/test/StreamsUtils.java b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/StreamsUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java index 3bace95c238..10469286e1a 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/StreamsUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java @@ -19,7 +19,6 @@ package org.elasticsearch.test; -import java.nio.charset.StandardCharsets; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -27,6 +26,7 
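
Editor's note: NodeConfigurationSource above now also owns the default mock-plugin list via an overridable mockPlugins(). A minimal subclass sketch follows; depending on the framework version the base class may declare further hooks (transportClientSettings() and friends), so treat this as a shape rather than a drop-in.

---------------------------------------------------------------------------
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.test.transport.MockTransportService;

class SingleMockConfigurationSource extends NodeConfigurationSource {
    @Override
    public Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder().put("node.mode", "local").build(); // placeholder settings
    }

    /** Narrow the default list down to the transport mock only. */
    @Override
    public Collection<Class<? extends Plugin>> mockPlugins() {
        List<Class<? extends Plugin>> plugins = new ArrayList<>();
        plugins.add(MockTransportService.TestPlugin.class);
        return plugins;
    }
}
---------------------------------------------------------------------------
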
@@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; public class StreamsUtils { diff --git a/test-framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java similarity index 99% rename from test-framework/src/main/java/org/elasticsearch/test/TestCluster.java rename to test/framework/src/main/java/org/elasticsearch/test/TestCluster.java index 858fbab9ab5..a05309a8a51 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/TestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java @@ -37,7 +37,7 @@ import java.net.InetSocketAddress; import java.util.Random; import java.util.Set; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; /** * Base test cluster that exposes the basis to run tests against any elasticsearch cluster, whose layout diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetaData.java b/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetaData.java new file mode 100644 index 00000000000..92d5b95cfac --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetaData.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.test; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.cluster.AbstractDiffable; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +public abstract class TestCustomMetaData extends AbstractDiffable implements MetaData.Custom { + private final String data; + + protected TestCustomMetaData(String data) { + this.data = data; + } + + public String getData() { + return data; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + TestCustomMetaData that = (TestCustomMetaData) o; + + if (!data.equals(that.data)) return false; + + return true; + } + + @Override + public int hashCode() { + return data.hashCode(); + } + + protected abstract TestCustomMetaData newTestCustomMetaData(String data); + + @Override + public MetaData.Custom readFrom(StreamInput in) throws IOException { + return newTestCustomMetaData(in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(getData()); + } + + @Override + public MetaData.Custom fromXContent(XContentParser parser) throws IOException { + XContentParser.Token token; + String data = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + String currentFieldName = parser.currentName(); + if ("data".equals(currentFieldName)) { + if (parser.nextToken() != XContentParser.Token.VALUE_STRING) { + throw new ElasticsearchParseException("failed to parse snapshottable metadata, invalid data type"); + } + data = parser.text(); + } else { + throw new ElasticsearchParseException("failed to parse snapshottable metadata, unknown field [{}]", currentFieldName); + } + } else { + throw new ElasticsearchParseException("failed to parse snapshottable metadata"); + } + } + if (data == null) { + throw new ElasticsearchParseException("failed to parse snapshottable metadata, data not found"); + } + return newTestCustomMetaData(data); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("data", getData()); + return builder; + } +} diff --git a/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java similarity index 96% rename from test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java rename to test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 468b1877250..796872bd350 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.test; import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; - import org.apache.lucene.search.Collector; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; @@ -35,7 +34,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import 
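
Editor's note: TestCustomMetaData above is deliberately abstract: serialization, XContent parsing, and equality are inherited, and a concrete subclass only supplies the factory method plus whatever MetaData.Custom itself demands. A hedged sketch of such a subclass; the type string is arbitrary, and the context() override assumes the MetaData.XContentContext API of this branch.

---------------------------------------------------------------------------
import java.util.EnumSet;

import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.test.TestCustomMetaData;

public class ExampleCustomMetaData extends TestCustomMetaData {
    public static final String TYPE = "example_custom"; // arbitrary type name

    public ExampleCustomMetaData(String data) {
        super(data);
    }

    /** The one factory hook the abstract base requires. */
    @Override
    protected TestCustomMetaData newTestCustomMetaData(String data) {
        return new ExampleCustomMetaData(data);
    }

    @Override
    public String type() {
        return TYPE;
    }

    @Override
    public EnumSet<MetaData.XContentContext> context() {
        // persist to the gateway and expose over the API; adjust as needed
        return MetaData.API_AND_GATEWAY;
    }
}
---------------------------------------------------------------------------
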
org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; @@ -60,6 +58,7 @@ import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -293,7 +292,7 @@ public class TestSearchContext extends SearchContext { } public void setSearcher(Engine.Searcher searcher) { - this.searcher = new ContextIndexSearcher(this, searcher); + this.searcher = new ContextIndexSearcher(searcher, indexService.cache().query(), indexShard.getQueryCachingPolicy()); } @Override @@ -549,18 +548,11 @@ public class TestSearchContext extends SearchContext { return null; } + @Override public MappedFieldType smartNameFieldType(String name) { if (mapperService() != null) { - return mapperService().smartNameFieldType(name, types()); - } - return null; - } - - @Override - public MappedFieldType smartNameFieldTypeFromAnyType(String name) { - if (mapperService() != null) { - return mapperService().smartNameFieldType(name); + return mapperService().fullName(name); } return null; } @@ -568,7 +560,7 @@ public class TestSearchContext extends SearchContext { @Override public ObjectMapper getObjectMapper(String name) { if (mapperService() != null) { - return mapperService().getObjectMapper(name, types); + return mapperService().getObjectMapper(name); } return null; } @@ -660,8 +652,11 @@ public class TestSearchContext extends SearchContext { public void copyContextAndHeadersFrom(HasContextAndHeaders other) {} @Override - public Map, Collector> queryCollectors() {return queryCollectors;} + public Profilers getProfilers() { + return null; // no profiling + } @Override - public QueryCache getQueryCache() { return indexService.cache().query();} + public Map, Collector> queryCollectors() {return queryCollectors;} + } diff --git a/test-framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/VersionUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java rename to test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java diff --git a/test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java similarity index 86% rename 
from test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java rename to test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java index cb3d643f555..a19d19dcf47 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java @@ -19,7 +19,16 @@ package org.elasticsearch.test.cluster; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -29,6 +38,7 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.TaskManager; import java.util.List; @@ -138,6 +148,11 @@ public class NoopClusterService implements ClusterService { return TimeValue.timeValueMillis(0); } + @Override + public TaskManager getTaskManager() { + return null; + } + @Override public Lifecycle.State lifecycleState() { return null; diff --git a/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java similarity index 88% rename from test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java rename to test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java index 5dc8cce99c6..92b5f9a584b 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java @@ -20,7 +20,17 @@ package org.elasticsearch.test.cluster; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -37,9 +47,13 @@ import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; import 
diff --git a/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java
similarity index 88%
rename from test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java
rename to test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java
index 5dc8cce99c6..92b5f9a584b 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java
@@ -20,7 +20,17 @@ package org.elasticsearch.test.cluster;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.*;
+import org.elasticsearch.cluster.ClusterChangedEvent;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateListener;
+import org.elasticsearch.cluster.ClusterStateTaskConfig;
+import org.elasticsearch.cluster.ClusterStateTaskExecutor;
+import org.elasticsearch.cluster.ClusterStateTaskListener;
+import org.elasticsearch.cluster.ClusterStateUpdateTask;
+import org.elasticsearch.cluster.LocalNodeMasterListener;
+import org.elasticsearch.cluster.TimeoutClusterStateListener;
 import org.elasticsearch.cluster.block.ClusterBlock;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
@@ -37,9 +47,13 @@ import org.elasticsearch.common.transport.DummyTransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.common.util.concurrent.FutureUtils;
+import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.threadpool.ThreadPool;

-import java.util.*;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Queue;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.ScheduledFuture;

@@ -47,6 +61,7 @@ import java.util.concurrent.ScheduledFuture;
 public class TestClusterService implements ClusterService {

     volatile ClusterState state;
+    private volatile TaskManager taskManager;
     private final List<ClusterStateListener> listeners = new CopyOnWriteArrayList<>();
     private final Queue<NotifyTimeout> onGoingTimeouts = ConcurrentCollections.newQueue();
     private final ThreadPool threadPool;
@@ -59,6 +74,7 @@ public class TestClusterService implements ClusterService {
     public TestClusterService(ThreadPool threadPool) {
         this(ClusterState.builder(new ClusterName("test")).build(), threadPool);
+        taskManager = new TaskManager(Settings.EMPTY);
     }

     public TestClusterService(ClusterState state) {
@@ -171,9 +187,11 @@ public class TestClusterService implements ClusterService {
         if (threadPool == null) {
             throw new UnsupportedOperationException("TestClusterService wasn't initialized with a thread pool");
         }
-        NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
-        notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout);
-        onGoingTimeouts.add(notifyTimeout);
+        if (timeout != null) {
+            NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
+            notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout);
+            onGoingTimeouts.add(notifyTimeout);
+        }
         listeners.add(listener);
         listener.postAdded();
     }
@@ -215,6 +233,11 @@ public class TestClusterService implements ClusterService {
         throw new UnsupportedOperationException();
     }

+    @Override
+    public TaskManager getTaskManager() {
+        return taskManager;
+    }
+
     @Override
     public List<PendingClusterTask> pendingTasks() {
         throw new UnsupportedOperationException();
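Two behavioral changes ride along with the TestClusterService rename: the thread-pool constructor now provisions a real TaskManager built from Settings.EMPTY, and the listener-registration method only schedules a NotifyTimeout when a timeout was actually supplied, so registering a listener without one no longer touches the scheduler. A sketch of how a test might rely on that; the add(timeout, listener) parameter order follows the ClusterService interface as I recall it, so treat it as an assumption:

```java
import org.elasticsearch.cluster.TimeoutClusterStateListener;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.cluster.TestClusterService;
import org.elasticsearch.threadpool.ThreadPool;

final class ListenerRegistration {
    static void register(ThreadPool threadPool, TimeoutClusterStateListener listener) {
        TestClusterService clusterService = new TestClusterService(threadPool);
        // With the null-timeout guard above, this no longer schedules anything
        // on the GENERIC pool; the listener is added and postAdded() still runs.
        clusterService.add((TimeValue) null, listener);
        // Supplying a timeout schedules a NotifyTimeout exactly as before.
        clusterService.add(TimeValue.timeValueSeconds(30), listener);
    }
}
```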
diff --git a/test-framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java
rename to test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDelaysPartition.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisconnectPartition.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartitionIT.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkUnresponsivePartition.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/NoOpDisruptionScheme.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/ServiceDisruptionScheme.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java
rename to test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java
similarity index 97%
rename from test-framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java
rename to test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java
index 0187d4ac035..a0f027bcbd8 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java
@@ -19,8 +19,6 @@
 package org.elasticsearch.test.engine;

-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.IndexSearcher;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.index.engine.Engine;
diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java
similarity index 81%
rename from test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java
rename to test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java
index de51670f57d..87a12791bc1 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineFactory.java
@@ -19,19 +19,10 @@
 package org.elasticsearch.test.engine;

 import org.apache.lucene.index.FilterDirectoryReader;
-import org.elasticsearch.common.inject.BindingAnnotation;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.EngineConfig;
 import org.elasticsearch.index.engine.EngineFactory;

-import java.lang.annotation.Retention;
-import java.lang.annotation.Target;
-
-import static java.lang.annotation.ElementType.FIELD;
-import static java.lang.annotation.ElementType.PARAMETER;
-import static java.lang.annotation.RetentionPolicy.RUNTIME;
-
 public final class MockEngineFactory implements EngineFactory {

     private final Class<? extends FilterDirectoryReader> wrapper;
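MockEngineFactory sheds its Guice wiring here: the @Inject constructor support and the @BindingAnnotation machinery are gone, leaving a plain EngineFactory that holds its reader-wrapper class directly. A hedged construction sketch; the single-argument constructor is inferred from the wrapper field above, and the surrounding class is invented:

```java
import org.apache.lucene.index.AssertingDirectoryReader;
import org.elasticsearch.test.engine.MockEngineFactory;

final class EngineFactoryWiring {
    static MockEngineFactory newMockFactory() {
        // Plain construction instead of Guice injection; AssertingDirectoryReader
        // is a stock Lucene test FilterDirectoryReader used to wrap readers.
        return new MockEngineFactory(AssertingDirectoryReader.class);
    }
}
```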
diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
similarity index 97%
rename from test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
rename to test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
index 70dfa6847b4..37ccbf47538 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
@@ -21,7 +21,11 @@ package org.elasticsearch.test.engine;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.AssertingIndexSearcher;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.QueryCache;
+import org.apache.lucene.search.QueryCachingPolicy;
+import org.apache.lucene.search.SearcherManager;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.logging.ESLogger;
@@ -49,7 +53,7 @@ public final class MockEngineSupport {
     public static final String WRAP_READER_RATIO = "index.engine.mock.random.wrap_reader_ratio";
     public static final String READER_WRAPPER_TYPE = "index.engine.mock.random.wrapper";
     public static final String FLUSH_ON_CLOSE_RATIO = "index.engine.mock.flush_on_close.ratio";
-    
+
     private final AtomicBoolean closing = new AtomicBoolean(false);
     private final ESLogger logger = Loggers.getLogger(Engine.class);
     private final ShardId shardId;
diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java
rename to test/framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java
similarity index 91%
rename from test-framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java
rename to test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java
index f05f69bf275..2116dcc390c 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockShadowEngine.java
@@ -19,18 +19,13 @@
 package org.elasticsearch.test.engine;

-import org.apache.lucene.index.AssertingDirectoryReader;
 import org.apache.lucene.index.FilterDirectoryReader;
-import org.apache.lucene.search.AssertingIndexSearcher;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.SearcherManager;
 import org.elasticsearch.index.engine.EngineConfig;
 import org.elasticsearch.index.engine.EngineException;
 import org.elasticsearch.index.engine.ShadowEngine;

-import java.io.IOException;
-import java.util.Map;
-
 final class MockShadowEngine extends ShadowEngine {
     private final MockEngineSupport support;
diff --git a/test-framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java
similarity index 93%
rename from test-framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java
rename to test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java
index 422b9375a1e..73281b3f6ea 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java
@@ -19,8 +19,16 @@
 package org.elasticsearch.test.engine;

-import org.apache.lucene.index.*;
-import org.apache.lucene.util.Bits;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.automaton.CompiledAutomaton;
diff --git a/test-framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java b/test/framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java
rename to test/framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java
rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionAssertions.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java
rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/CollectionMatchers.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java
similarity index 99%
rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java
rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java
index 1853d291c6d..d86791fa6bd 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java
@@ -22,7 +22,6 @@ import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.rest.client.http.HttpResponse;
 import org.hamcrest.Description;
-import org.hamcrest.Matcher;
 import org.hamcrest.TypeSafeMatcher;

 public class ElasticsearchMatchers {
diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java
rename to test/framework/src/main/java/org/elasticsearch/test/hamcrest/RegexMatcher.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java
similarity index 91%
rename from test-framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java
rename to test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java
index d2615eabcac..b999f24822b 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/Network.java
@@ -20,7 +20,9 @@ package org.elasticsearch.test.junit.annotations;
 import com.carrotsearch.randomizedtesting.annotations.TestGroup;

-import java.lang.annotation.*;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;

 /**
  * Annotation used to set if internet network connectivity is required to run the test.
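The @Network annotation whose Javadoc appears in the hunk above is a randomizedtesting @TestGroup: annotated tests only run when the network group is enabled. A usage sketch; the class and test names are invented, and the enabling system property is my recollection of the convention rather than something stated in this patch:

```java
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.Network;

public class ExternalEndpointTests extends ESTestCase {
    @Network // runs only when the network test group is enabled, e.g. -Dtests.network=true
    public void testAgainstRealNetwork() {
        // ... would require actual internet connectivity here ...
    }
}
```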
diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java
rename to test/framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java
rename to test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
similarity index 97%
rename from test-framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
rename to test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
index 47a77dfc9d2..969d59d885e 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
@@ -18,10 +18,7 @@
  */
 package org.elasticsearch.test.junit.listeners;

-import com.carrotsearch.randomizedtesting.RandomizedContext;
 import com.carrotsearch.randomizedtesting.ReproduceErrorMessageBuilder;
-import com.carrotsearch.randomizedtesting.TraceFormatting;
-
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
@@ -145,7 +142,7 @@ public class ReproduceInfoPrinter extends RunListener {
             // these properties only make sense for integration tests
             appendProperties("es.node.mode", "es.node.local", TESTS_CLUSTER, ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES);
         }
-        appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms", 
+        appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms",
                 "tests.client.ratio", "tests.heap.size", "tests.bwc", "tests.bwc.version");
         if (System.getProperty("tests.jvm.argline") != null && !System.getProperty("tests.jvm.argline").isEmpty()) {
             appendOpt("tests.jvm.argline", "\"" + System.getProperty("tests.jvm.argline") + "\"");
diff --git a/test-framework/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java b/test/framework/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java
rename to test/framework/src/main/java/org/elasticsearch/test/junit/rule/RepeatOnExceptionRule.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java b/test/framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
similarity index 99%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index 266f8e8038c..b4aecd52a14 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -22,7 +22,6 @@ package org.elasticsearch.test.rest;
 import com.carrotsearch.randomizedtesting.RandomizedTest;
 import com.carrotsearch.randomizedtesting.annotations.TestGroup;
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
-
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.LuceneTestCase.SuppressFsync;
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/Stash.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestException.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestException.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/RestException.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/RestException.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java
similarity index 96%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java
index f6e3ddabd5e..ed1ce728c0b 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java
@@ -19,7 +19,11 @@
 package org.elasticsearch.test.rest.client;

-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;

 public class RestPath {
     private final List<PathPart> parts;
@@ -94,4 +98,4 @@ public class RestPath {
             this.pathPart = pathPart;
         }
     }
-}
\ No newline at end of file
+}
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/RestResponse.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpDeleteWithEntity.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpGetWithEntity.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
similarity index 95%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
index 34665efa0f1..e4c8849a92f 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
@@ -18,7 +18,13 @@
  */
 package org.elasticsearch.test.rest.client.http;

-import org.apache.http.client.methods.*;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
+import org.apache.http.client.methods.HttpHead;
+import org.apache.http.client.methods.HttpOptions;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.methods.HttpUriRequest;
 import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.elasticsearch.client.support.Headers;
@@ -168,6 +174,7 @@ public class HttpRequestBuilder {
             logger.trace("sending request \n{}", stringBuilder.toString());
         }
         for (Map.Entry<String, String> entry : this.headers.entrySet()) {
+            logger.trace("adding header [{} => {}]", entry.getKey(), entry.getValue());
             httpUriRequest.addHeader(entry.getKey(), entry.getValue());
         }
         try (CloseableHttpResponse closeableHttpResponse = httpClient.execute(httpUriRequest)) {
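The one functional change in HttpRequestBuilder is the new trace line that logs every header as it is attached to the outgoing request. A sketch of what that surfaces when the client logger is set to TRACE; the fluent builder method names follow the class's style but are assumptions of this sketch, not verified signatures:

```java
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
import org.elasticsearch.test.rest.client.http.HttpResponse;

final class HeaderTraceExample {
    static HttpResponse call() throws Exception {
        CloseableHttpClient client = HttpClients.createDefault();
        return new HttpRequestBuilder(client)
                .method("GET")
                .path("/_cat/health")
                // with TRACE enabled this now logs: adding header [Accept => application/json]
                .addHeader("Accept", "application/json")
                .execute();
    }
}
```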
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java
similarity index 93%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java
index 9f3a8b6df9d..3d5e21e5146 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java
@@ -21,7 +21,9 @@ package org.elasticsearch.test.rest.section;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;

-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.equalToIgnoringCase;
 import static org.junit.Assert.assertThat;

 /**
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java
similarity index 91%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java
index aacb5f0a3bf..1a899c3cc2b 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java
@@ -21,7 +21,10 @@ package org.elasticsearch.test.rest.section;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;

-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.equalToIgnoringCase;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
 import static org.junit.Assert.assertThat;

 /**
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java
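The assertion sections above all trade `import static org.hamcrest.Matchers.*` for explicit static imports, which makes each file's matcher vocabulary visible at a glance. An illustrative assertion in the style these classes use; the values and the helper class are made up:

```java
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;

final class MatcherStyleExample {
    static void check(Object actualValue) {
        // Same shape as the is_true/is_false checks: non-null, then value comparisons.
        assertThat(actualValue, notNullValue());
        assertThat(actualValue.toString(), not(equalTo("")));
    }
}
```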
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java
similarity index 95%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java
index 16efcae96c5..e00fbbea01c 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java
@@ -24,7 +24,9 @@ import org.elasticsearch.common.logging.Loggers;
 import java.util.regex.Pattern;

 import static org.elasticsearch.test.hamcrest.RegexMatcher.matches;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;
 import static org.junit.Assert.assertThat;

 /**
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApi.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java
similarity index 100%
rename from test-framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java
diff --git a/test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
similarity index 95%
rename from test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
rename to test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
index 27a2e6fb22e..58a72789f65 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
@@ -21,10 +21,13 @@ package org.elasticsearch.test.store;
 import com.carrotsearch.randomizedtesting.SeedUtils;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import java.nio.charset.StandardCharsets;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.store.*;
+import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.LockFactory;
+import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.StoreRateLimiting;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestRuleMarkFailure;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -35,11 +38,11 @@ import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.shard.*;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardPath;
 import org.elasticsearch.index.store.FsDirectoryService;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.index.store.Store;
-import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Assert;
@@ -47,8 +50,11 @@ import org.junit.Assert;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Random;

 public class MockFSDirectoryService extends FsDirectoryService {
diff --git a/test-framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
similarity index 94%
rename from test-framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
rename to test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
index 86cf0ddb563..3fe700701dd 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
@@ -25,7 +25,11 @@ import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.shard.*;
+import org.elasticsearch.index.shard.IndexEventListener;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.IndexShardState;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardPath;
 import org.elasticsearch.index.store.DirectoryService;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.index.store.IndexStoreConfig;
diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java
similarity index 92%
rename from test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java
rename to test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java
index 64cc401cb5f..8b395003576 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java
@@ -23,6 +23,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -30,7 +31,6 @@ import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportException;
-import org.elasticsearch.transport.TransportModule;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.TransportRequestOptions;
 import org.elasticsearch.transport.TransportResponse;
@@ -51,12 +51,12 @@ public String description() {
             return "an asserting transport for testing";
         }
-        public void onModule(TransportModule transportModule) {
-            transportModule.addTransport("mock", AssertingLocalTransport.class);
+        public void onModule(NetworkModule module) {
+            module.registerTransport("mock", AssertingLocalTransport.class);
         }

         @Override
         public Settings additionalSettings() {
-            return Settings.builder().put(TransportModule.TRANSPORT_TYPE_KEY, "mock").build();
+            return Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "mock").build();
         }
     }
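This hunk is the template for the transport-side plugin migration: TransportModule#addTransport is gone, and test transports now register through NetworkModule and select themselves via the transport type key. Pulled out as a standalone sketch; the plugin class name is invented, while the hook and setting come straight from the hunk above:

```java
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.transport.AssertingLocalTransport;

public class MockTransportPlugin extends Plugin {
    @Override
    public String name() {
        return "mock-transport";
    }

    @Override
    public String description() {
        return "registers the asserting test transport";
    }

    // Duck-typed plugin hook: called with the NetworkModule instead of the
    // removed TransportModule.
    public void onModule(NetworkModule module) {
        module.registerTransport("mock", AssertingLocalTransport.class);
    }

    @Override
    public Settings additionalSettings() {
        return Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "mock").build();
    }
}
```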
diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java
similarity index 90%
rename from test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java
rename to test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java
index 2363d98a113..229dc944b90 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java
@@ -18,17 +18,27 @@
  */
 package org.elasticsearch.test.transport;

-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.component.Lifecycle;
 import org.elasticsearch.common.component.LifecycleListener;
 import org.elasticsearch.common.transport.BoundTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
-import org.elasticsearch.transport.*;
+import org.elasticsearch.transport.ConnectTransportException;
+import org.elasticsearch.transport.RemoteTransportException;
+import org.elasticsearch.transport.Transport;
+import org.elasticsearch.transport.TransportException;
+import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.transport.TransportRequestOptions;
+import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportServiceAdapter;

 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.BlockingQueue;

 /** A transport class that doesn't send anything but rather captures all requests for inspection from tests */
diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
similarity index 98%
rename from test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
rename to test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
index e1efd6c3745..0a8869b20cf 100644
--- a/test-framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
@@ -25,6 +25,7 @@ import org.elasticsearch.common.component.LifecycleListener;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.BoundTransportAddress;
@@ -33,12 +34,12 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.ConnectTransportException;
 import org.elasticsearch.transport.RequestHandlerRegistry;
 import org.elasticsearch.transport.Transport;
 import org.elasticsearch.transport.TransportException;
-import org.elasticsearch.transport.TransportModule;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.TransportRequestOptions;
 import org.elasticsearch.transport.TransportService;
@@ -75,12 +76,12 @@ public class MockTransportService extends TransportService {
         public String description() {
             return "a mock transport service for testing";
         }
-        public void onModule(TransportModule transportModule) {
-            transportModule.addTransportService("mock", MockTransportService.class);
+        public void onModule(NetworkModule module) {
+            module.registerTransportService("mock", MockTransportService.class);
         }

         @Override
         public Settings additionalSettings() {
-            return Settings.builder().put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, "mock").build();
+            return Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "mock").build();
         }
     }
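MockTransportService makes the matching change on the TransportService side (registerTransportService and TRANSPORT_SERVICE_TYPE_KEY). Either way, selection ultimately happens through node settings; a minimal sketch of settings that pick both mock implementations registered above (the wrapper class is illustrative):

```java
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;

final class MockTransportSettings {
    static Settings build() {
        return Settings.builder()
                .put(NetworkModule.TRANSPORT_TYPE_KEY, "mock")          // Transport
                .put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "mock")  // TransportService
                .build();
    }
}
```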
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
similarity index 95%
rename from test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
index 1514e254a7f..c1cfa56c8be 100644
--- a/test-framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
@@ -27,9 +27,13 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.InternalTestCluster;
 import org.elasticsearch.test.NodeConfigurationSource;
 
-import java.io.IOException;
 import java.nio.file.Path;
-import java.util.*;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasEntry;
@@ -52,8 +56,8 @@ public class InternalTestClusterTests extends ESTestCase {
         String nodePrefix = randomRealisticUnicodeOfCodepointLengthBetween(1, 10);
         Path baseDir = createTempDir();
-        InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, true);
-        InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, true);
+        InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList());
+        InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList());
         // TODO: this is not ideal - we should have a way to make sure ports are initialized in the same way
         assertClusters(cluster0, cluster1, false);
@@ -110,8 +114,8 @@ public class InternalTestClusterTests extends ESTestCase {
         String nodePrefix = "foobar";
         Path baseDir = createTempDir();
-        InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, true);
-        InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName2, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, true);
+        InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList());
+        InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName2, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList());
 
         assertClusters(cluster0, cluster1, false);
         long seed = randomLong();
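The InternalTestClusterTests hunks above also record a signature change: the trailing boolean argument of the InternalTestCluster constructor is replaced by a collection of mock plugin classes, so Collections.emptyList() now means "install no mocks". A hedged sketch of opting back in to a specific mock follows; it assumes the parameter type is Collection<Class<? extends Plugin>> and that MockTransportService exposes its plugin as a nested TestPlugin class, both of which are assumptions.

```java
import java.nio.file.Path;
import java.util.Collections;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.test.transport.MockTransportService;

public class ClusterFactory {

    // Mirrors the updated constructor calls in the hunk above; the final
    // argument now lists the mock plugins to install on every node.
    public static InternalTestCluster newClusterWithMockTransport(
            long clusterSeed, Path baseDir, NodeConfigurationSource source) {
        return new InternalTestCluster("local", clusterSeed, baseDir,
                2, 5,           // min/max data nodes
                "test-cluster", source,
                0,              // client nodes
                false,          // HTTP pipelining
                "node-",
                Collections.<Class<? extends Plugin>>singletonList(
                        MockTransportService.TestPlugin.class));
    }
}
```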
diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java
similarity index 99%
rename from test-framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java
rename to test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java
index 55d86aa4c9a..b9643dda7c7 100644
--- a/test-framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.test.test;
 
 import com.carrotsearch.randomizedtesting.annotations.Repeat;
-
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.TestCluster;
diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java b/test/framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java
rename to test/framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java
diff --git a/test-framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java
similarity index 100%
rename from test-framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java
diff --git a/test-framework/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml b/test/framework/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml
similarity index 100%
rename from test-framework/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml
rename to test/framework/src/test/resources/rest-api-spec/test/suite1/10_basic.yaml
diff --git a/test-framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml b/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml
similarity index 100%
rename from test-framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml
rename to test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yaml
diff --git a/test-framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml b/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml
similarity index 100%
rename from test-framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml
rename to test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yaml
diff --git a/test-framework/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml b/test/framework/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml
similarity index 100%
rename from test-framework/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml
rename to test/framework/src/test/resources/rest-api-spec/test/suite2/15_test2.yaml