diff --git a/.dir-locals.el b/.dir-locals.el
index 2fdca14f5dd..0728ce905dd 100644
--- a/.dir-locals.el
+++ b/.dir-locals.el
@@ -83,6 +83,6 @@
            ))
  (c-basic-offset . 4)
  (c-comment-only-line-offset . (0 . 0))
- (fill-column . 140)
- (fci-rule-column . 140)
+ (fill-column . 100)
+ (fci-rule-column . 100)
  (compile-command . "gradle compileTestJava"))))
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 92b35e97baa..6a4531f1bde 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -11,3 +11,4 @@ attention.
 - If submitting code, have you built your formula locally prior to submission with `gradle check`?
 - If submitting code, is your pull request against master? Unless there is a good reason otherwise, we prefer pull requests against master and will backport as needed.
 - If submitting code, have you checked that your submission is for an [OS that we support](https://www.elastic.co/support/matrix#show_os)?
+- If you are submitting this code as part of a class, have you read our [policy](https://github.com/elastic/elasticsearch/blob/master/CONTRIBUTING.md#contributing-as-part-of-a-class) on class contributions?
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 5885bf9def7..8775e1464d0 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -88,8 +88,8 @@ Contributing to the Elasticsearch codebase
 **Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch)
 
 Make sure you have [Gradle](http://gradle.org) installed, as
-Elasticsearch uses it as its build system. Gradle must be version 2.13 _exactly_ in
-order to build successfully.
+Elasticsearch uses it as its build system. Gradle must be at least
+version 3.3 in order to build successfully.
 
 Eclipse users can automatically configure their IDE: `gradle eclipse` then
 `File: Import: Existing Projects into Workspace`. Select the
@@ -139,3 +139,32 @@ Before submitting your changes, run the test suite to make sure that nothing is
 ```sh
 gradle check
 ```
+
+Contributing as part of a class
+-------------------------------
+In general Elasticsearch is happy to accept contributions that were created as
+part of a class but strongly advises against making the contribution as part of
+the class. So if you have code you wrote for a class, feel free to submit it.
+
+Please, please, please do not assign contributing to Elasticsearch as part of a
+class. If you really want to make writing code for Elasticsearch an assignment,
+then the contributions should be made to your private clone, and opening PRs
+against the primary Elasticsearch repository must be optional, fully voluntary,
+not for a grade, and without any deadlines.
+
+Because:
+
+* While the code review process is likely very educational, it can take wildly
+varying amounts of time depending on who is available, where the change is, and
+how deep the change is. There is no way to predict how long it will take unless
+we rush.
+* We do not rush reviews without a very, very good reason. Class deadlines
+aren't a good enough reason for us to rush reviews.
+* We deeply discourage opening a PR that you don't intend to see through the
+entire code review process, because it wastes our time.
+* We don't have the capacity to absorb an entire class full of new
+contributors, especially when they are unlikely to become long-term
+contributors.
+
+Finally, we require that you run `gradle check` before submitting a
+non-documentation contribution.
This is mentioned above, but it is worth +repeating in this section because it has come up in this context. diff --git a/README.textile b/README.textile index 5bc5b7f2505..9c2b2c5d91e 100644 --- a/README.textile +++ b/README.textile @@ -200,7 +200,7 @@ We have just covered a very small portion of what Elasticsearch is all about. Fo h3. Building from Source -Elasticsearch uses "Gradle":https://gradle.org for its build system. You'll need to have version 2.13 of Gradle installed. +Elasticsearch uses "Gradle":https://gradle.org for its build system. You'll need to have at least version 3.3 of Gradle installed. In order to create a distribution, simply run the @gradle assemble@ command in the cloned directory. diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index fcb504b83ce..6536c77e587 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -23,8 +23,8 @@ apply plugin: 'groovy' group = 'org.elasticsearch.gradle' -if (GradleVersion.current() < GradleVersion.version('2.13')) { - throw new GradleException('Gradle 2.13+ is required to build elasticsearch') +if (GradleVersion.current() < GradleVersion.version('3.3')) { + throw new GradleException('Gradle 3.3+ is required to build elasticsearch') } if (JavaVersion.current() < JavaVersion.VERSION_1_8) { @@ -96,23 +96,12 @@ dependencies { compile 'org.apache.rat:apache-rat:0.11' } -// Gradle version-specific options (allows build to run with Gradle 2.13 as well as 2.14+/3.+) -if (GradleVersion.current() == GradleVersion.version("2.13")) { - // ProgressLogger(-Factory) classes are part of the public Gradle API - sourceSets.main.groovy.srcDir 'src/main/gradle-2.13-groovy' +// Gradle 2.14+ removed ProgressLogger(-Factory) classes from the public APIs +// Use logging dependency instead - dependencies { - compile 'ru.vyarus:gradle-animalsniffer-plugin:1.0.1' // last version compatible with Gradle 2.13 - } -} else { - // Gradle 2.14+ removed ProgressLogger(-Factory) classes from the public APIs - // Use logging dependency instead - sourceSets.main.groovy.srcDir 'src/main/gradle-2.14-groovy' - - dependencies { - compileOnly "org.gradle:gradle-logging:${GradleVersion.current().getVersion()}" - compile 'ru.vyarus:gradle-animalsniffer-plugin:1.2.0' // Gradle 2.14 requires a version > 1.0.1 - } +dependencies { + compileOnly "org.gradle:gradle-logging:${GradleVersion.current().getVersion()}" + compile 'ru.vyarus:gradle-animalsniffer-plugin:1.2.0' // Gradle 2.14 requires a version > 1.0.1 } /***************************************************************************** diff --git a/buildSrc/src/main/gradle-2.13-groovy/org/elasticsearch/gradle/ProgressLogger.groovy b/buildSrc/src/main/gradle-2.13-groovy/org/elasticsearch/gradle/ProgressLogger.groovy deleted file mode 100644 index 5c02e255a1a..00000000000 --- a/buildSrc/src/main/gradle-2.13-groovy/org/elasticsearch/gradle/ProgressLogger.groovy +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.gradle - -/** - * Wraps a ProgressLogger so that code in src/main/groovy does not need to - * define imports on Gradle 2.13/2.14+ ProgressLoggers - */ -class ProgressLogger { - @Delegate org.gradle.logging.ProgressLogger progressLogger - - ProgressLogger(org.gradle.logging.ProgressLogger progressLogger) { - this.progressLogger = progressLogger - } -} diff --git a/buildSrc/src/main/gradle-2.13-groovy/org/elasticsearch/gradle/ProgressLoggerFactoryInjection.groovy b/buildSrc/src/main/gradle-2.13-groovy/org/elasticsearch/gradle/ProgressLoggerFactoryInjection.groovy deleted file mode 100644 index 290c4d581d6..00000000000 --- a/buildSrc/src/main/gradle-2.13-groovy/org/elasticsearch/gradle/ProgressLoggerFactoryInjection.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.gradle - -import org.gradle.logging.ProgressLoggerFactory - -import javax.inject.Inject - -/** - * Allows to inject a ProgressLoggerFactory to tasks in src/main/groovy - * without requiring the corresponding import of ProgressLoggerFactory, - * making it compatible with both Gradle 2.13 and 2.14+. - */ -trait ProgressLoggerFactoryInjection { - @Inject - ProgressLoggerFactory getProgressLoggerFactory() { - throw new UnsupportedOperationException() - } -} diff --git a/buildSrc/src/main/gradle-2.14-groovy/org/elasticsearch/gradle/ProgressLogger.groovy b/buildSrc/src/main/gradle-2.14-groovy/org/elasticsearch/gradle/ProgressLogger.groovy deleted file mode 100644 index 2c9fab78b43..00000000000 --- a/buildSrc/src/main/gradle-2.14-groovy/org/elasticsearch/gradle/ProgressLogger.groovy +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.gradle - -/** - * Wraps a ProgressLogger so that code in src/main/groovy does not need to - * define imports on Gradle 2.13/2.14+ ProgressLoggers - */ -class ProgressLogger { - @Delegate org.gradle.internal.logging.progress.ProgressLogger progressLogger - - ProgressLogger(org.gradle.internal.logging.progress.ProgressLogger progressLogger) { - this.progressLogger = progressLogger - } -} diff --git a/buildSrc/src/main/gradle-2.14-groovy/org/elasticsearch/gradle/ProgressLoggerFactoryInjection.groovy b/buildSrc/src/main/gradle-2.14-groovy/org/elasticsearch/gradle/ProgressLoggerFactoryInjection.groovy deleted file mode 100644 index 8891d65611a..00000000000 --- a/buildSrc/src/main/gradle-2.14-groovy/org/elasticsearch/gradle/ProgressLoggerFactoryInjection.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.gradle - -import org.gradle.internal.logging.progress.ProgressLoggerFactory - -import javax.inject.Inject - -/** - * Allows to inject a ProgressLoggerFactory to tasks in src/main/groovy - * without requiring the corresponding import of ProgressLoggerFactory, - * making it compatible with both Gradle 2.13 and 2.14+. 
- */
-trait ProgressLoggerFactoryInjection {
-    @Inject
-    ProgressLoggerFactory getProgressLoggerFactory() {
-        throw new UnsupportedOperationException()
-    }
-}
diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy
index 8d93301e0c7..e24c226837d 100644
--- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy
+++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy
@@ -8,7 +8,6 @@ import org.apache.tools.ant.BuildException
 import org.apache.tools.ant.DefaultLogger
 import org.apache.tools.ant.RuntimeConfigurable
 import org.apache.tools.ant.UnknownElement
-import org.elasticsearch.gradle.ProgressLoggerFactoryInjection
 import org.gradle.api.DefaultTask
 import org.gradle.api.file.FileCollection
 import org.gradle.api.file.FileTreeElement
@@ -20,9 +19,12 @@ import org.gradle.api.tasks.Optional
 import org.gradle.api.tasks.TaskAction
 import org.gradle.api.tasks.util.PatternFilterable
 import org.gradle.api.tasks.util.PatternSet
+import org.gradle.internal.logging.progress.ProgressLoggerFactory
 import org.gradle.util.ConfigureUtil
 
-class RandomizedTestingTask extends DefaultTask implements ProgressLoggerFactoryInjection {
+import javax.inject.Inject
+
+class RandomizedTestingTask extends DefaultTask {
 
     // TODO: change to "executable" to match gradle test params?
     @Optional
@@ -92,6 +94,11 @@
         listenersConfig.listeners.add(new TestReportLogger(logger: logger, config: testLoggingConfig))
     }
 
+    @Inject
+    ProgressLoggerFactory getProgressLoggerFactory() {
+        throw new UnsupportedOperationException()
+    }
+
     void jvmArgs(Iterable<String> arguments) {
         jvmArgs.addAll(arguments)
     }
diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy
index a9786935c56..da25afa9389 100644
--- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy
+++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy
@@ -25,7 +25,8 @@ import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedStartEvent
 import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteResultEvent
 import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultEvent
 import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
-import org.elasticsearch.gradle.ProgressLogger
+import org.gradle.internal.logging.progress.ProgressLogger
+import org.gradle.internal.logging.progress.ProgressLoggerFactory
 
 import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds
 import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.ERROR
@@ -51,6 +52,8 @@ import static java.lang.Math.max
  * quick.
  */
 class TestProgressLogger implements AggregatedEventListener {
+    /** Factory to build a progress logger when testing starts */
+    ProgressLoggerFactory factory
     ProgressLogger progressLogger
     int totalSuites
     int totalSlaves
@@ -74,17 +77,14 @@
     /** Have we finished a whole suite yet? */
     volatile boolean suiteFinished = false
     /* Note that we probably overuse volatile here but it isn't hurting us and
-       lets us move things around without worying about breaking things. */
-
-    TestProgressLogger(Map args) {
-        progressLogger = new ProgressLogger(args.factory.newOperation(TestProgressLogger))
-        progressLogger.setDescription('Randomized test runner')
-    }
+       lets us move things around without worrying about breaking things. */
 
     @Subscribe
     void onStart(AggregatedStartEvent e) throws IOException {
         totalSuites = e.suiteCount
         totalSlaves = e.slaveCount
+        progressLogger = factory.newOperation(TestProgressLogger)
+        progressLogger.setDescription('Randomized test runner')
         progressLogger.started()
         progressLogger.progress(
             "Starting JUnit4 for ${totalSuites} suites on ${totalSlaves} jvms")
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 011ac94cf2e..ad1445759a1 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -122,7 +122,7 @@ class BuildPlugin implements Plugin<Project> {
         }
 
         // enforce gradle version
-        GradleVersion minGradle = GradleVersion.version('2.13')
+        GradleVersion minGradle = GradleVersion.version('3.3')
         if (GradleVersion.current() < minGradle) {
             throw new GradleException("${minGradle} or above is required to build elasticsearch")
         }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
index 018f9fde2f2..33ca6dccfa3 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
@@ -209,9 +209,11 @@ public class ThirdPartyAuditTask extends AntTask {
         try {
             ant.thirdPartyAudit(failOnUnsupportedJava: false,
                             failOnMissingClasses: false,
-                            signaturesFile: new File(getClass().getResource('/forbidden/third-party-audit.txt').toURI()),
                             classpath: classpath.asPath) {
                 fileset(dir: tmpDir)
+                signatures {
+                    string(value: getClass().getResourceAsStream('/forbidden/third-party-audit.txt').getText('UTF-8'))
+                }
             }
         } catch (BuildException ignore) {}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
index 8d65f8c0d60..4a884735259 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
@@ -125,6 +125,8 @@ class ClusterConfiguration {
 
     Map<String, String> settings = new HashMap<>()
 
+    Map<String, String> keystoreSettings = new HashMap<>()
+
     // map from destination path, to source file
     Map<String, Object> extraConfigFiles = new HashMap<>()
 
@@ -144,6 +146,11 @@
         settings.put(name, value)
     }
 
+    @Input
+    void keystoreSetting(String name, String value) {
+        keystoreSettings.put(name, value)
+    }
+
     @Input
     void plugin(String path) {
         Project pluginProject = project.project(path)
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
index c3dff77dfd4..e8061b02f3d 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
@@ -38,6 +38,7 @@ import org.gradle.api.tasks.Copy
 import org.gradle.api.tasks.Delete
 import org.gradle.api.tasks.Exec
+import java.nio.charset.StandardCharsets
 import java.nio.file.Paths
 import java.util.concurrent.TimeUnit
 
@@ -157,10 +158,14 @@ class ClusterFormationTasks {
                 node.cwd.mkdirs()
             }
         }
+
         setup = configureCheckPreviousTask(taskName(prefix, node, 'checkPrevious'), project, setup, node)
         setup = configureStopTask(taskName(prefix, node, 'stopPrevious'), project, setup, node)
         setup = configureExtractTask(taskName(prefix, node, 'extract'), project, setup, node, configuration)
         setup = configureWriteConfigTask(taskName(prefix, node, 'configure'), project, setup, node, seedNode)
+        setup = configureCreateKeystoreTask(taskName(prefix, node, 'createKeystore'), project, setup, node)
+        setup = configureAddKeystoreSettingTasks(prefix, project, setup, node)
+
         if (node.config.plugins.isEmpty() == false) {
             if (node.nodeVersion == VersionProperties.elasticsearch) {
                 setup = configureCopyPluginsTask(taskName(prefix, node, 'copyPlugins'), project, setup, node)
@@ -303,6 +308,33 @@
         }
     }
 
+    /** Adds a task to create the keystore */
+    static Task configureCreateKeystoreTask(String name, Project project, Task setup, NodeInfo node) {
+        if (node.config.keystoreSettings.isEmpty()) {
+            return setup
+        } else {
+            File esKeystoreUtil = Paths.get(node.homeDir.toString(), "bin/" + "elasticsearch-keystore").toFile()
+            return configureExecTask(name, project, setup, node, esKeystoreUtil, 'create')
+        }
+    }
+
+    /** Adds tasks to add settings to the keystore */
+    static Task configureAddKeystoreSettingTasks(String parent, Project project, Task setup, NodeInfo node) {
+        Map<String, String> kvs = node.config.keystoreSettings
+        File esKeystoreUtil = Paths.get(node.homeDir.toString(), "bin/" + "elasticsearch-keystore").toFile()
+        Task parentTask = setup
+        for (Map.Entry<String, String> entry in kvs) {
+            String key = entry.getKey()
+            String name = taskName(parent, node, 'addToKeystore#' + key)
+            Task t = configureExecTask(name, project, parentTask, node, esKeystoreUtil, 'add', key, '-x')
+            t.doFirst {
+                standardInput = new ByteArrayInputStream(entry.getValue().getBytes(StandardCharsets.UTF_8))
+            }
+            parentTask = t
+        }
+        return parentTask
+    }
+
     static Task configureExtraConfigFilesTask(String name, Project project, Task setup, NodeInfo node) {
         if (node.config.extraConfigFiles.isEmpty()) {
             return setup
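To make the new keystore plumbing concrete, here is a minimal, hypothetical build-script sketch. The `integTestCluster { }` block name and the setting names are assumptions for illustration; any configuration block backed by `ClusterConfiguration` would work the same way:

```groovy
// Hypothetical usage of the keystoreSetting(...) method added to ClusterConfiguration above.
integTestCluster {
    // written to config/elasticsearch.yml at cluster setup time, as before
    setting 'node.attr.rack', 'rack1'
    // stored in the node keystore instead: at setup time this expands to
    // `bin/elasticsearch-keystore create` followed by
    // `bin/elasticsearch-keystore add node.secret -x`, with the value piped
    // to the exec task's standardInput as UTF-8 bytes
    keystoreSetting 'node.secret', 's3kr3t'
}
```

Each entry becomes its own `addToKeystore#<name>` exec task chained after `createKeystore`, so the adds run strictly one after another.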
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
index 075e8129e6f..98ee91e37a8 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
@@ -22,10 +22,15 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingTask
 import org.elasticsearch.gradle.BuildPlugin
 import org.gradle.api.DefaultTask
 import org.gradle.api.Task
+import org.gradle.api.execution.TaskExecutionAdapter
 import org.gradle.api.internal.tasks.options.Option
 import org.gradle.api.plugins.JavaBasePlugin
 import org.gradle.api.tasks.Input
-import org.gradle.util.ConfigureUtil
+import org.gradle.api.tasks.TaskState
+
+import java.nio.charset.StandardCharsets
+import java.nio.file.Files
+import java.util.stream.Stream
 
 /**
  * A wrapper task around setting up a cluster and running rest tests.
@@ -71,6 +76,24 @@ public class RestIntegTestTask extends DefaultTask {
         // both as separate sysprops
         runner.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}")
 
+        // dump errors and warnings from cluster log on failure
+        TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
+            @Override
+            void afterExecute(Task task, TaskState state) {
+                if (state.failure != null) {
+                    for (NodeInfo nodeInfo : nodes) {
+                        printLogExcerpt(nodeInfo)
+                    }
+                }
+            }
+        }
+        runner.doFirst {
+            project.gradle.addListener(logDumpListener)
+        }
+        runner.doLast {
+            project.gradle.removeListener(logDumpListener)
+        }
+
         // copy the rest spec/tests into the test resources
         RestSpecHack.configureDependencies(project)
         project.afterEvaluate {
@@ -126,4 +149,42 @@
     public Task mustRunAfter(Object... tasks) {
         clusterInit.mustRunAfter(tasks)
     }
+
+    /** Print out an excerpt of the log from the given node. */
+    protected static void printLogExcerpt(NodeInfo nodeInfo) {
+        File logFile = new File(nodeInfo.homeDir, "logs/${nodeInfo.clusterName}.log")
+        println("\nCluster ${nodeInfo.clusterName} - node ${nodeInfo.nodeNum} log excerpt:")
+        println("(full log at ${logFile})")
+        println('-----------------------------------------')
+        Stream<String> stream = Files.lines(logFile.toPath(), StandardCharsets.UTF_8)
+        try {
+            boolean inStartup = true
+            boolean inExcerpt = false
+            int linesSkipped = 0
+            for (String line : stream) {
+                if (line.startsWith("[")) {
+                    inExcerpt = false // clear with the next log message
+                }
+                if (line =~ /(\[WARN\])|(\[ERROR\])/) {
+                    inExcerpt = true // show warnings and errors
+                }
+                if (inStartup || inExcerpt) {
+                    if (linesSkipped != 0) {
+                        println("... SKIPPED ${linesSkipped} LINES ...")
+                    }
+                    println(line)
+                    linesSkipped = 0
+                } else {
+                    ++linesSkipped
+                }
+                if (line =~ /recovered \[\d+\] indices into cluster_state/) {
+                    inStartup = false
+                }
+            }
+        } finally {
+            stream.close()
+        }
+        println('=========================================')
+
+    }
 }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy
index 85fd433bc77..e15759a1fe5 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy
@@ -19,10 +19,9 @@
 package org.elasticsearch.gradle.vagrant
 
 import com.carrotsearch.gradle.junit4.LoggingOutputStream
-import groovy.transform.PackageScope
-import org.elasticsearch.gradle.ProgressLogger
 import org.gradle.api.GradleScriptException
 import org.gradle.api.logging.Logger
+import org.gradle.internal.logging.progress.ProgressLogger
 
 import java.util.regex.Matcher
 
@@ -48,7 +47,7 @@ public class TapLoggerOutputStream extends LoggingOutputStream {
 
     TapLoggerOutputStream(Map args) {
         logger = args.logger
-        progressLogger = new ProgressLogger(args.factory.newOperation(VagrantLoggerOutputStream))
+        progressLogger = args.factory.newOperation(VagrantLoggerOutputStream)
         progressLogger.setDescription("TAP output for `${args.command}`")
     }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy
index cd4d4bf87a5..abc6af9e09d 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy
+++
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy @@ -19,15 +19,17 @@ package org.elasticsearch.gradle.vagrant import org.apache.commons.io.output.TeeOutputStream -import org.elasticsearch.gradle.ProgressLoggerFactoryInjection import org.elasticsearch.gradle.LoggedExec import org.gradle.api.tasks.Input +import org.gradle.internal.logging.progress.ProgressLoggerFactory + +import javax.inject.Inject /** * Runs a vagrant command. Pretty much like Exec task but with a nicer output * formatter and defaults to `vagrant` as first part of commandLine. */ -public class VagrantCommandTask extends LoggedExec implements ProgressLoggerFactoryInjection { +public class VagrantCommandTask extends LoggedExec { @Input String boxName @@ -47,6 +49,11 @@ public class VagrantCommandTask extends LoggedExec implements ProgressLoggerFact } } + @Inject + ProgressLoggerFactory getProgressLoggerFactory() { + throw new UnsupportedOperationException() + } + protected OutputStream createLoggerOutputStream() { return new VagrantLoggerOutputStream( command: commandLine.join(' '), diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy index de6c5a36db9..e899c017129 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy @@ -19,7 +19,7 @@ package org.elasticsearch.gradle.vagrant import com.carrotsearch.gradle.junit4.LoggingOutputStream -import org.elasticsearch.gradle.ProgressLogger +import org.gradle.internal.logging.progress.ProgressLogger /** * Adapts an OutputStream being written to by vagrant into a ProcessLogger. It @@ -53,7 +53,7 @@ public class VagrantLoggerOutputStream extends LoggingOutputStream { private String heading = '' VagrantLoggerOutputStream(Map args) { - progressLogger = new ProgressLogger(args.factory.newOperation(VagrantLoggerOutputStream)) + progressLogger = args.factory.newOperation(VagrantLoggerOutputStream) progressLogger.setDescription("Vagrant output for `$args.command`") squashedPrefix = args.squashedPrefix } diff --git a/buildSrc/src/main/resources/checkstyle.xml b/buildSrc/src/main/resources/checkstyle.xml index 891a85d50a9..85b55a71cf8 100644 --- a/buildSrc/src/main/resources/checkstyle.xml +++ b/buildSrc/src/main/resources/checkstyle.xml @@ -22,7 +22,7 @@ suppress the check there but enforce it everywhere else. This prevents the list from getting longer even if it is unfair. 
-->
-      <property name="max" value="140"/>
+      <property name="max" value="100"/>
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 85a658df5ac..ab0a75a007a 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -10,547 +10,2416 @@
[hunk body lost in extraction: the per-file <suppress files="..." checks="..." /> entries were stripped as markup, leaving only bare +/- markers; the hunk regenerates the suppression list for the new 100-column line-length limit, growing it from roughly 550 to roughly 2,400 lines]
diff --git a/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs
index 9bee5e587b0..e30b8df6cc4 100644
--- a/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs
+++ b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,5 @@
 eclipse.preferences.version=1
 
-# previous configuration from maven build
 # this is merged with gradle's generated properties during 'gradle eclipse'
 
 # NOTE: null pointer analysis etc is not enabled currently, it seems very unstable
@@ -17,6 +16,6 @@
 # org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning
 org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
 
-org.eclipse.jdt.core.formatter.lineSplit=140
+org.eclipse.jdt.core.formatter.lineSplit=100
 org.eclipse.jdt.core.formatter.tabulation.char=space
 org.eclipse.jdt.core.formatter.tabulation.size=4
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 77157d2f3c8..8938a0fdfab 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -10,7 +10,7 @@ snakeyaml = 1.15
 # When updating log4j, please update also docs/java-api/index.asciidoc
 log4j = 2.7
 slf4j = 1.6.2
-jna = 4.2.2
+jna = 4.4.0
 
 # test dependencies
 randomizedrunner = 2.5.0
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java
index ddb96cdc00e..00c19019f47 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java
@@ -56,35 +56,35 @@ public class DeleteDocumentationIT extends ESRestHighLevelClientTestCase {
     public void testDelete() throws IOException {
         RestHighLevelClient client = highLevelClient();
 
-        // tag::delete-request[]
+        // tag::delete-request
         DeleteRequest request = new DeleteRequest(
             "index",    // <1>
             "type",     // <2>
             "id");      // <3>
-        // end::delete-request[]
+        // end::delete-request
 
-        // tag::delete-request-props[]
+        // tag::delete-request-props
         request.timeout(TimeValue.timeValueSeconds(1));                     // <1>
         request.timeout("1s");                                              // <2>
         request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);    // <3>
         request.setRefreshPolicy("wait_for");                               // <4>
         request.version(2);                                                 // <5>
         request.versionType(VersionType.EXTERNAL);                          // <6>
-        // end::delete-request-props[]
+        // end::delete-request-props
 
-        // tag::delete-execute[]
+        // tag::delete-execute
         DeleteResponse response = client.delete(request);
-        // end::delete-execute[]
+        // end::delete-execute
 
         try {
-            // tag::delete-notfound[]
+            // tag::delete-notfound
             if (response.getResult().equals(DocWriteResponse.Result.NOT_FOUND)) {
                 throw new Exception("Can't find document to be removed"); // <1>
             }
-            // end::delete-notfound[]
+            // end::delete-notfound
         } catch (Exception ignored) { }
 
-        // tag::delete-execute-async[]
+        // tag::delete-execute-async
         client.deleteAsync(request, new ActionListener<DeleteResponse>() {
             @Override
             public void onResponse(DeleteResponse
deleteResponse) { @@ -96,9 +96,9 @@ public class DeleteDocumentationIT extends ESRestHighLevelClientTestCase { // <2> } }); - // end::delete-execute-async[] + // end::delete-execute-async - // tag::delete-conflict[] + // tag::delete-conflict try { client.delete(request); } catch (ElasticsearchException exception) { @@ -106,7 +106,7 @@ public class DeleteDocumentationIT extends ESRestHighLevelClientTestCase { // <1> } } - // end::delete-conflict[] + // end::delete-conflict } } diff --git a/core/licenses/jna-4.2.2.jar.sha1 b/core/licenses/jna-4.2.2.jar.sha1 deleted file mode 100644 index 8b1acbbe5d7..00000000000 --- a/core/licenses/jna-4.2.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5012450aee579c3118ff09461d5ce210e0cdc2a9 \ No newline at end of file diff --git a/core/licenses/jna-4.4.0.jar.sha1 b/core/licenses/jna-4.4.0.jar.sha1 new file mode 100644 index 00000000000..9655b2c92e8 --- /dev/null +++ b/core/licenses/jna-4.4.0.jar.sha1 @@ -0,0 +1 @@ +cb208278274bf12ebdb56c61bd7407e6f774d65a \ No newline at end of file diff --git a/core/src/main/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScanner.java b/core/src/main/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScanner.java new file mode 100644 index 00000000000..1cd5fb9340d --- /dev/null +++ b/core/src/main/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScanner.java @@ -0,0 +1,171 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.lucene.search.uhighlight; + +import java.text.BreakIterator; +import java.text.CharacterIterator; +import java.util.Locale; + +/** + * A custom break iterator that scans text to find break-delimited passages bounded by + * a provided maximum length. This class delegates the boundary search to a first level + * break iterator. When this break iterator finds a passage greater than the maximum length + * a secondary break iterator is used to re-split the passage at the first boundary after + * maximum length. + * This is useful to split passages created by {@link BreakIterator}s like `sentence` that + * can create big outliers on semi-structured text. + * + * WARNING: This break iterator is designed to work with the {@link UnifiedHighlighter}. 
+ **/ +public class BoundedBreakIteratorScanner extends BreakIterator { + private final BreakIterator mainBreak; + private final BreakIterator innerBreak; + private final int maxLen; + + private int lastPrecedingOffset = -1; + private int windowStart = -1; + private int windowEnd = -1; + private int innerStart = -1; + private int innerEnd = 0; + + private BoundedBreakIteratorScanner(BreakIterator mainBreak, + BreakIterator innerBreak, + int maxLen) { + this.mainBreak = mainBreak; + this.innerBreak = innerBreak; + this.maxLen = maxLen; + } + + @Override + public CharacterIterator getText() { + return mainBreak.getText(); + } + + @Override + public void setText(CharacterIterator newText) { + reset(); + mainBreak.setText(newText); + innerBreak.setText(newText); + } + + @Override + public void setText(String newText) { + reset(); + mainBreak.setText(newText); + innerBreak.setText(newText); + } + + private void reset() { + lastPrecedingOffset = -1; + windowStart = -1; + windowEnd = -1; + innerStart = -1; + innerEnd = 0; + } + + /** + * Must be called with increasing offset. See {@link FieldHighlighter} for usage. + */ + @Override + public int preceding(int offset) { + if (offset < lastPrecedingOffset) { + throw new IllegalArgumentException("offset < lastPrecedingOffset: " + + "usage doesn't look like UnifiedHighlighter"); + } + if (offset > windowStart && offset < windowEnd) { + innerStart = innerEnd; + innerEnd = windowEnd; + } else { + windowStart = innerStart = mainBreak.preceding(offset); + windowEnd = innerEnd = mainBreak.following(offset-1); + } + + if (innerEnd - innerStart > maxLen) { + // the current split is too big, + // so starting from the current term we try to find boundaries on the left first + if (offset - maxLen > innerStart) { + innerStart = Math.max(innerStart, + innerBreak.preceding(offset - maxLen)); + } + // and then we try to expand the passage to the right with the remaining size + int remaining = Math.max(0, maxLen - (offset - innerStart)); + if (offset + remaining < windowEnd) { + innerEnd = Math.min(windowEnd, + innerBreak.following(offset + remaining)); + } + } + lastPrecedingOffset = offset - 1; + return innerStart; + } + + /** + * Can be invoked only after a call to preceding(offset+1). + * See {@link FieldHighlighter} for usage. + */ + @Override + public int following(int offset) { + if (offset != lastPrecedingOffset || innerEnd == -1) { + throw new IllegalArgumentException("offset != lastPrecedingOffset: " + + "usage doesn't look like UnifiedHighlighter"); + } + return innerEnd; + } + + /** + * Returns a {@link BreakIterator#getSentenceInstance(Locale)} bounded to maxLen. + * Secondary boundaries are found using a {@link BreakIterator#getWordInstance(Locale)}. 
+     */
+    public static BreakIterator getSentence(Locale locale, int maxLen) {
+        final BreakIterator sBreak = BreakIterator.getSentenceInstance(locale);
+        final BreakIterator wBreak = BreakIterator.getWordInstance(locale);
+        return new BoundedBreakIteratorScanner(sBreak, wBreak, maxLen);
+    }
+
+
+    @Override
+    public int current() {
+        // Returns the last offset of the current split
+        return this.innerEnd;
+    }
+
+    @Override
+    public int first() {
+        throw new IllegalStateException("first() should not be called in this context");
+    }
+
+    @Override
+    public int next() {
+        throw new IllegalStateException("next() should not be called in this context");
+    }
+
+    @Override
+    public int last() {
+        throw new IllegalStateException("last() should not be called in this context");
+    }
+
+    @Override
+    public int next(int n) {
+        throw new IllegalStateException("next(n) should not be called in this context");
+    }
+
+    @Override
+    public int previous() {
+        throw new IllegalStateException("previous() should not be called in this context");
+    }
+}
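As a quick illustration of the contract documented on `preceding`/`following` above, here is a hedged usage sketch; the sample text and match offset are invented, and in real use the `UnifiedHighlighter`'s `FieldHighlighter` (not user code) drives these calls:

```groovy
import java.text.BreakIterator
import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner

// Sentence-level passages, re-split on word boundaries when a sentence exceeds ~100 chars.
BreakIterator bi = BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 100)
bi.setText('A short sentence. An extremely long run-on sentence that would otherwise ' +
           'come back as one oversized passage on semi-structured text...')
int matchOffset = 40                       // made-up offset of a highlighted term
int start = bi.preceding(matchOffset + 1)  // must be called first, with increasing offsets
int end = bi.following(matchOffset)        // only valid right after preceding(matchOffset + 1)
println "passage window: [$start, $end)"
```

Note the deliberate asymmetry with a plain `BreakIterator`: `first()`, `next()`, `last()`, and `previous()` throw, because the highlighter only ever uses the `preceding`/`following` pair.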
diff --git a/core/src/main/java/org/apache/lucene/search/uhighlight/CustomFieldHighlighter.java b/core/src/main/java/org/apache/lucene/search/uhighlight/CustomFieldHighlighter.java
new file mode 100644
index 00000000000..915e7cc1531
--- /dev/null
+++ b/core/src/main/java/org/apache/lucene/search/uhighlight/CustomFieldHighlighter.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lucene.search.uhighlight;
+
+import java.text.BreakIterator;
+import java.util.Locale;
+
+import static org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter.MULTIVAL_SEP_CHAR;
+
+/**
+ * Custom {@link FieldHighlighter} that creates a single passage bounded to {@code noMatchSize} when
+ * no highlights were found.
+ */
+class CustomFieldHighlighter extends FieldHighlighter {
+    private static final Passage[] EMPTY_PASSAGE = new Passage[0];
+
+    private final Locale breakIteratorLocale;
+    private final int noMatchSize;
+    private final String fieldValue;
+
+    CustomFieldHighlighter(String field, FieldOffsetStrategy fieldOffsetStrategy,
+                           Locale breakIteratorLocale, BreakIterator breakIterator,
+                           PassageScorer passageScorer, int maxPassages, int maxNoHighlightPassages,
+                           PassageFormatter passageFormatter, int noMatchSize, String fieldValue) {
+        super(field, fieldOffsetStrategy, breakIterator, passageScorer, maxPassages,
+            maxNoHighlightPassages, passageFormatter);
+        this.breakIteratorLocale = breakIteratorLocale;
+        this.noMatchSize = noMatchSize;
+        this.fieldValue = fieldValue;
+    }
+
+    @Override
+    protected Passage[] getSummaryPassagesNoHighlight(int maxPassages) {
+        if (noMatchSize > 0) {
+            int pos = 0;
+            while (pos < fieldValue.length() && fieldValue.charAt(pos) == MULTIVAL_SEP_CHAR) {
+                pos++;
+            }
+            if (pos < fieldValue.length()) {
+                int end = fieldValue.indexOf(MULTIVAL_SEP_CHAR, pos);
+                if (end == -1) {
+                    end = fieldValue.length();
+                }
+                if (noMatchSize + pos < end) {
+                    BreakIterator bi = BreakIterator.getWordInstance(breakIteratorLocale);
+                    bi.setText(fieldValue);
+                    // Finds the next word boundary **after** noMatchSize.
+                    end = bi.following(noMatchSize + pos);
+                    if (end == BreakIterator.DONE) {
+                        end = fieldValue.length();
+                    }
+                }
+                Passage passage = new Passage();
+                passage.setScore(Float.NaN);
+                passage.setStartOffset(pos);
+                passage.setEndOffset(end);
+                return new Passage[]{passage};
+            }
+        }
+        return EMPTY_PASSAGE;
+    }
+}
diff --git a/core/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/core/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java
index 4f1ec5fdb83..4a20fb0478f 100644
--- a/core/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java
+++ b/core/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java
@@ -33,6 +33,8 @@ import org.apache.lucene.search.spans.SpanNearQuery;
 import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
@@ -47,6 +49,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Set;
 
 /**
  * Subclass of the {@link UnifiedHighlighter} that works for a single field in a single document.
@@ -57,37 +60,41 @@
  * Supports both returning empty snippets and non highlighted snippets when no highlighting can be performed.
*/ public class CustomUnifiedHighlighter extends UnifiedHighlighter { + public static final char MULTIVAL_SEP_CHAR = (char) 0; private static final Snippet[] EMPTY_SNIPPET = new Snippet[0]; private final String fieldValue; private final PassageFormatter passageFormatter; private final BreakIterator breakIterator; - private final boolean returnNonHighlightedSnippets; + private final Locale breakIteratorLocale; + private final int noMatchSize; /** * Creates a new instance of {@link CustomUnifiedHighlighter} * * @param analyzer the analyzer used for the field at index time, used for multi term queries internally * @param passageFormatter our own {@link CustomPassageFormatter} - * which generates snippets in forms of {@link Snippet} objects + * which generates snippets in forms of {@link Snippet} objects + * @param breakIteratorLocale the {@link Locale} to use for dividing text into passages. + * If null {@link Locale#ROOT} is used * @param breakIterator the {@link BreakIterator} to use for dividing text into passages. - * If null {@link BreakIterator#getSentenceInstance(Locale)} is used. - * @param fieldValue the original field values as constructor argument, loaded from the _source field or - * the relevant stored field. - * @param returnNonHighlightedSnippets whether non highlighted snippets should be - * returned rather than empty snippets when no highlighting can be performed + * If null {@link BreakIterator#getSentenceInstance(Locale)} is used. + * @param fieldValue the original field values delimited by MULTIVAL_SEP_CHAR + * @param noMatchSize The size of the text that should be returned when no highlighting can be performed */ public CustomUnifiedHighlighter(IndexSearcher searcher, Analyzer analyzer, PassageFormatter passageFormatter, + @Nullable Locale breakIteratorLocale, @Nullable BreakIterator breakIterator, String fieldValue, - boolean returnNonHighlightedSnippets) { + int noMatchSize) { super(searcher, analyzer); this.breakIterator = breakIterator; + this.breakIteratorLocale = breakIteratorLocale == null ? 
Locale.ROOT : breakIteratorLocale;
         this.passageFormatter = passageFormatter;
         this.fieldValue = fieldValue;
-        this.returnNonHighlightedSnippets = returnNonHighlightedSnippets;
+        this.noMatchSize = noMatchSize;
     }
 
     /**
@@ -111,16 +118,13 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter {
 
     @Override
     protected List<CharSequence[]> loadFieldValues(String[] fields, DocIdSetIterator docIter, int cacheCharsThreshold) throws IOException {
-        //we only highlight one field, one document at a time
+        // we only highlight one field, one document at a time
         return Collections.singletonList(new String[]{fieldValue});
     }
 
     @Override
     protected BreakIterator getBreakIterator(String field) {
-        if (breakIterator != null) {
-            return breakIterator;
-        }
-        return super.getBreakIterator(field);
+        return breakIterator;
     }
 
     @Override
@@ -129,11 +133,18 @@
     }
 
     @Override
-    protected int getMaxNoHighlightPassages(String field) {
-        if (returnNonHighlightedSnippets) {
-            return 1;
-        }
-        return 0;
+    protected FieldHighlighter getFieldHighlighter(String field, Query query, Set<Term> allTerms, int maxPassages) {
+        BytesRef[] terms = filterExtractedTerms(getFieldMatcher(field), allTerms);
+        Set<HighlightFlag> highlightFlags = getFlags(field);
+        PhraseHelper phraseHelper = getPhraseHelper(field, query, highlightFlags);
+        CharacterRunAutomaton[] automata = getAutomata(field, query, highlightFlags);
+        OffsetSource offsetSource = getOptimizedOffsetSource(field, terms, phraseHelper, automata);
+        BreakIterator breakIterator = new SplittingBreakIterator(getBreakIterator(field),
+            UnifiedHighlighter.MULTIVAL_SEP_CHAR);
+        FieldOffsetStrategy strategy =
+            getOffsetStrategy(offsetSource, field, terms, phraseHelper, automata, highlightFlags);
+        return new CustomFieldHighlighter(field, strategy, breakIteratorLocale, breakIterator,
+            getScorer(field), maxPassages, (noMatchSize > 0 ? 1 : 0), getFormatter(field), noMatchSize, fieldValue);
     }
 
     @Override
@@ -146,7 +157,6 @@
         return rewriteCustomQuery(query);
     }
 
-
     /**
      * Translate custom queries in queries that are supported by the unified highlighter.
     */
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java
index 6da503ef828..f03bb49fdae 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.action.admin.indices.validate.query;
 
+import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
@@ -27,20 +28,26 @@ import java.io.IOException;
 
 public class QueryExplanation implements Streamable {
 
+    public static final int RANDOM_SHARD = -1;
+
     private String index;
-
+
+    private int shard = RANDOM_SHARD;
+
     private boolean valid;
-
+
     private String explanation;
-
+
     private String error;
 
     QueryExplanation() {
-
+
     }
-
-    public QueryExplanation(String index, boolean valid, String explanation, String error) {
+
+    public QueryExplanation(String index, int shard, boolean valid, String explanation,
+                            String error) {
         this.index = index;
+        this.shard = shard;
         this.valid = valid;
         this.explanation = explanation;
         this.error = error;
@@ -50,6 +57,10 @@
         return this.index;
     }
 
+    public int getShard() {
+        return this.shard;
+    }
+
     public boolean isValid() {
         return this.valid;
     }
@@ -65,6 +76,11 @@
     @Override
     public void readFrom(StreamInput in) throws IOException {
         index = in.readString();
+        if (in.getVersion().onOrAfter(Version.V_5_4_0_UNRELEASED)) {
+            shard = in.readInt();
+        } else {
+            shard = RANDOM_SHARD;
+        }
         valid = in.readBoolean();
         explanation = in.readOptionalString();
         error = in.readOptionalString();
@@ -73,6 +89,9 @@
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(index);
+        if (out.getVersion().onOrAfter(Version.V_5_4_0_UNRELEASED)) {
+            out.writeInt(shard);
+        }
         out.writeBoolean(valid);
         out.writeOptionalString(explanation);
         out.writeOptionalString(error);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
index b80b721149c..3a13915b3aa 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
@@ -89,8 +89,14 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<ValidateQueryRequest, ValidateQueryResponse, ShardValidateQueryRequest, ShardValidateQueryResponse> {
-        Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, Integer.toString(Randomness.get().nextInt(1000)), request.indices());
+        final String routing;
+        if (request.allShards()) {
+            routing = null;
+        } else {
+            // Random routing to limit request to a single shard
+            routing = Integer.toString(Randomness.get().nextInt(1000));
+        }
+        Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, routing, request.indices());
         return clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, "_local");
     }
@@ -124,12 +130,13 @@ public class TransportValidateQueryAction extends
TransportBroadcastAction(); } queryExplanations.add(new QueryExplanation( validateQueryResponse.getIndex(), + request.allShards() ? validateQueryResponse.getShardId().getId() : QueryExplanation.RANDOM_SHARD, validateQueryResponse.isValid(), validateQueryResponse.getExplanation(), validateQueryResponse.getError() diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 41ef37ad621..18ccf1ede7d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.validate.query; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.IndicesOptions; @@ -43,6 +44,7 @@ public class ValidateQueryRequest extends BroadcastRequest private boolean explain; private boolean rewrite; + private boolean allShards; private String[] types = Strings.EMPTY_ARRAY; @@ -125,6 +127,20 @@ public class ValidateQueryRequest extends BroadcastRequest return rewrite; } + /** + * Indicates whether the query should be validated on all shards instead of one random shard + */ + public void allShards(boolean allShards) { + this.allShards = allShards; + } + + /** + * Indicates whether the query should be validated on all shards instead of one random shard + */ + public boolean allShards() { + return allShards; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -138,6 +154,9 @@ public class ValidateQueryRequest extends BroadcastRequest } explain = in.readBoolean(); rewrite = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_5_4_0_UNRELEASED)) { + allShards = in.readBoolean(); + } } @Override @@ -150,11 +169,14 @@ public class ValidateQueryRequest extends BroadcastRequest } out.writeBoolean(explain); out.writeBoolean(rewrite); + if (out.getVersion().onOrAfter(Version.V_5_4_0_UNRELEASED)) { + out.writeBoolean(allShards); + } } @Override public String toString() { return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", query[" + query + "], explain:" + explain + - ", rewrite:" + rewrite; + ", rewrite:" + rewrite + ", all_shards:" + allShards; } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java index 8e377968980..bd8067e05cb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java @@ -64,4 +64,12 @@ public class ValidateQueryRequestBuilder extends BroadcastOperationRequestBuilde request.rewrite(rewrite); return this; } + + /** + * Indicates whether the query should be validated on all shards + */ + public ValidateQueryRequestBuilder setAllShards(boolean allShards) { + request.allShards(allShards); + return this; + } } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/byscroll/AbstractAsyncBulkByScrollAction.java
b/core/src/main/java/org/elasticsearch/action/bulk/byscroll/AbstractAsyncBulkByScrollAction.java index 834321f1798..72d39c038ed 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/byscroll/AbstractAsyncBulkByScrollAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/byscroll/AbstractAsyncBulkByScrollAction.java @@ -465,14 +465,18 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, List searchFailures, boolean timedOut) { - scrollSource.close(); - if (failure == null) { - listener.onResponse( - buildResponse(timeValueNanos(System.nanoTime() - startTime.get()), indexingFailures, searchFailures, timedOut)); - } else { - listener.onFailure(failure); - } + protected void finishHim(Exception failure, List indexingFailures, + List searchFailures, boolean timedOut) { + scrollSource.close(() -> { + if (failure == null) { + BulkByScrollResponse response = buildResponse( + timeValueNanos(System.nanoTime() - startTime.get()), + indexingFailures, searchFailures, timedOut); + listener.onResponse(response); + } else { + listener.onFailure(failure); + } + }); } /** diff --git a/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java b/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java index 9fc02e29e62..3bacc187ebb 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java @@ -113,8 +113,8 @@ public class ClientScrollableHitSource extends ScrollableHitSource { } @Override - protected void cleanup() { - // Nothing to do + protected void cleanup(Runnable onCompletion) { + onCompletion.run(); } /** diff --git a/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ScrollableHitSource.java b/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ScrollableHitSource.java index 73aa6536986..6426bad592f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ScrollableHitSource.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ScrollableHitSource.java @@ -47,7 +47,7 @@ import static java.util.Objects.requireNonNull; /** * A scrollable source of results. */ -public abstract class ScrollableHitSource implements Closeable { +public abstract class ScrollableHitSource { private final AtomicReference scrollId = new AtomicReference<>(); protected final Logger logger; @@ -82,25 +82,31 @@ public abstract class ScrollableHitSource implements Closeable { } protected abstract void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse); - @Override - public final void close() { + public final void close(Runnable onCompletion) { String scrollId = this.scrollId.get(); if (Strings.hasLength(scrollId)) { - clearScroll(scrollId, this::cleanup); + clearScroll(scrollId, () -> cleanup(onCompletion)); } else { - cleanup(); + cleanup(onCompletion); } } + /** * Called to clear a scroll id. + * * @param scrollId the id to clear - * @param onCompletion implementers must call this after completing the clear whether they are successful or not + * @param onCompletion implementers must call this after completing the clear whether they are + * successful or not */ protected abstract void clearScroll(String scrollId, Runnable onCompletion); /** - * Called after the process has been totally finished to clean up any resources the process needed like remote connections. 
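The refactoring above replaces the blocking `Closeable.close()` contract on `ScrollableHitSource` with a completion callback, so the final response is only delivered once scroll cleanup has actually finished. A minimal sketch of the callback-chaining idea, using simplified hypothetical types rather than the actual Elasticsearch classes:

```java
// Simplified stand-in for the pattern above: close(onCompletion) first clears
// the scroll (if one was ever set), then cleans up, then notifies the caller.
abstract class AsyncScrollSource {
    private volatile String scrollId;

    public final void close(Runnable onCompletion) {
        String id = scrollId;
        if (id != null) {
            clearScroll(id, () -> cleanup(onCompletion));
        } else {
            cleanup(onCompletion);
        }
    }

    /** Implementers must invoke onCompletion whether or not the clear succeeded. */
    protected abstract void clearScroll(String scrollId, Runnable onCompletion);

    /** Implementers must invoke onCompletion whether or not the cleanup succeeded. */
    protected abstract void cleanup(Runnable onCompletion);
}
```

The key invariant is that every path through `close` eventually runs `onCompletion`, which is why both abstract methods document that obligation.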
+ * Called after the process has been totally finished to clean up any resources the process + * needed like remote connections. + * + * @param onCompletion implementers must call this after completing the cleanup whether they are + * successful or not */ - protected abstract void cleanup(); + protected abstract void cleanup(Runnable onCompletion); /** * Set the id of the last scroll. Used for debugging. diff --git a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index bf95b7517c6..b90b5c8240e 100644 --- a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -43,6 +43,7 @@ import org.elasticsearch.transport.Transport; import java.util.List; import java.util.Map; import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.stream.Collectors; @@ -67,17 +68,17 @@ abstract class AbstractSearchAsyncAction exten private final SetOnce> shardFailures = new SetOnce<>(); private final Object shardFailuresMutex = new Object(); private final AtomicInteger successfulOps = new AtomicInteger(); - private final long startTime; + private final TransportSearchAction.SearchTimeProvider timeProvider; protected AbstractSearchAsyncAction(String name, Logger logger, SearchTransportService searchTransportService, Function nodeIdToConnection, Map aliasFilter, Map concreteIndexBoosts, Executor executor, SearchRequest request, - ActionListener listener, GroupShardsIterator shardsIts, long startTime, + ActionListener listener, GroupShardsIterator shardsIts, TransportSearchAction.SearchTimeProvider timeProvider, long clusterStateVersion, SearchTask task, SearchPhaseResults resultConsumer) { super(name, request, shardsIts, logger); - this.startTime = startTime; + this.timeProvider = timeProvider; this.logger = logger; this.searchTransportService = searchTransportService; this.executor = executor; @@ -94,10 +95,9 @@ abstract class AbstractSearchAsyncAction exten /** * Builds how long it took to execute the search. */ - private long buildTookInMillis() { - // protect ourselves against time going backwards - // negative values don't make sense and we want to be able to serialize that thing as a vLong - return Math.max(1, System.currentTimeMillis() - startTime); + long buildTookInMillis() { + return TimeUnit.NANOSECONDS.toMillis( + timeProvider.getRelativeCurrentNanos() - timeProvider.getRelativeStartNanos()); } /** @@ -122,7 +122,8 @@ abstract class AbstractSearchAsyncAction exten if (successfulOps.get() == 0) { // we have 0 successful results that means we shortcut stuff and return a failure if (logger.isDebugEnabled()) { final ShardOperationFailedException[] shardSearchFailures = ExceptionsHelper.groupBy(buildShardFailures()); - Throwable cause = ElasticsearchException.guessRootCauses(shardSearchFailures[0].getCause())[0]; + Throwable cause = shardSearchFailures.length == 0 ? 
null : + ElasticsearchException.guessRootCauses(shardSearchFailures[0].getCause())[0]; logger.debug((Supplier) () -> new ParameterizedMessage("All shards failed for phase: [{}]", getName()), cause); } @@ -300,7 +301,7 @@ abstract class AbstractSearchAsyncAction exten assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shard.index().getUUID(), DEFAULT_INDEX_BOOST); return new ShardSearchTransportRequest(request, shardIt.shardId(), getNumShards(), - filter, indexBoost, startTime); + filter, indexBoost, timeProvider.getAbsoluteStartMillis()); } /** diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index d846c42dbea..d3b2ea3a98e 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -33,28 +33,59 @@ import java.util.concurrent.Executor; import java.util.function.Function; final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction { + private final SearchPhaseController searchPhaseController; - SearchDfsQueryThenFetchAsyncAction(Logger logger, SearchTransportService searchTransportService, - Function nodeIdToConnection, - Map aliasFilter, Map concreteIndexBoosts, - SearchPhaseController searchPhaseController, Executor executor, SearchRequest request, - ActionListener listener, GroupShardsIterator shardsIts, long startTime, - long clusterStateVersion, SearchTask task) { - super("dfs", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, executor, - request, listener, shardsIts, startTime, clusterStateVersion, task, new SearchPhaseResults<>(shardsIts.size())); + SearchDfsQueryThenFetchAsyncAction( + final Logger logger, + final SearchTransportService searchTransportService, + final Function nodeIdToConnection, + final Map aliasFilter, + final Map concreteIndexBoosts, + final SearchPhaseController searchPhaseController, + final Executor executor, + final SearchRequest request, + final ActionListener listener, + final GroupShardsIterator shardsIts, + final TransportSearchAction.SearchTimeProvider timeProvider, + final long clusterStateVersion, + final SearchTask task) { + super( + "dfs", + logger, + searchTransportService, + nodeIdToConnection, + aliasFilter, + concreteIndexBoosts, + executor, + request, + listener, + shardsIts, + timeProvider, + clusterStateVersion, + task, + new SearchPhaseResults<>(shardsIts.size())); this.searchPhaseController = searchPhaseController; } @Override - protected void executePhaseOnShard(ShardIterator shardIt, ShardRouting shard, ActionListener listener) { + protected void executePhaseOnShard( + final ShardIterator shardIt, + final ShardRouting shard, + final ActionListener listener) { getSearchTransport().sendExecuteDfs(getConnection(shard.currentNodeId()), buildShardSearchRequest(shardIt, shard) , getTask(), listener); } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { - return new DfsQueryPhase(results.results, searchPhaseController, - (queryResults) -> new FetchSearchPhase(queryResults, searchPhaseController, context), context); + protected SearchPhase getNextPhase( + final SearchPhaseResults results, final SearchPhaseContext context) { + return new DfsQueryPhase( + results.results, + searchPhaseController, + (queryResults) -> + new 
FetchSearchPhase(queryResults, searchPhaseController, context), + context); } + } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index 210a9aefda7..fe87b8f4dba 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -32,30 +32,60 @@ import java.util.Map; import java.util.concurrent.Executor; import java.util.function.Function; -final class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction { +final class SearchQueryThenFetchAsyncAction + extends AbstractSearchAsyncAction { + private final SearchPhaseController searchPhaseController; - SearchQueryThenFetchAsyncAction(Logger logger, SearchTransportService searchTransportService, - Function nodeIdToConnection, - Map aliasFilter, Map concreteIndexBoosts, - SearchPhaseController searchPhaseController, Executor executor, - SearchRequest request, ActionListener listener, - GroupShardsIterator shardsIts, long startTime, long clusterStateVersion, - SearchTask task) { - super("query", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, executor, - request, listener, shardsIts, startTime, clusterStateVersion, task, - searchPhaseController.newSearchPhaseResults(request, shardsIts.size())); + SearchQueryThenFetchAsyncAction( + final Logger logger, + final SearchTransportService searchTransportService, + final Function nodeIdToConnection, + final Map aliasFilter, + final Map concreteIndexBoosts, + final SearchPhaseController searchPhaseController, + final Executor executor, + final SearchRequest request, + final ActionListener listener, + final GroupShardsIterator shardsIts, + final TransportSearchAction.SearchTimeProvider timeProvider, + long clusterStateVersion, + SearchTask task) { + super( + "query", + logger, + searchTransportService, + nodeIdToConnection, + aliasFilter, + concreteIndexBoosts, + executor, + request, + listener, + shardsIts, + timeProvider, + clusterStateVersion, + task, + searchPhaseController.newSearchPhaseResults(request, shardsIts.size())); this.searchPhaseController = searchPhaseController; } - protected void executePhaseOnShard(ShardIterator shardIt, ShardRouting shard, ActionListener listener) { - getSearchTransport().sendExecuteQuery(getConnection(shard.currentNodeId()), - buildShardSearchRequest(shardIt, shard), getTask(), listener); + protected void executePhaseOnShard( + final ShardIterator shardIt, + final ShardRouting shard, + final ActionListener listener) { + getSearchTransport().sendExecuteQuery( + getConnection(shard.currentNodeId()), + buildShardSearchRequest(shardIt, shard), + getTask(), + listener); } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase( + final SearchPhaseResults results, + final SearchPhaseContext context) { return new FetchSearchPhase(results, searchPhaseController, context); } + } diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index e86cfef6e14..008d022a655 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -52,6 +52,7 @@ import 
java.util.Map; import java.util.Set; import java.util.concurrent.Executor; import java.util.function.Function; +import java.util.function.LongSupplier; import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; @@ -116,10 +117,62 @@ public class TransportSearchAction extends HandledTransportAction listener) { - // pure paranoia if time goes backwards we are at least positive - final long startTimeInMillis = Math.max(0, System.currentTimeMillis()); + final long absoluteStartMillis = System.currentTimeMillis(); + final long relativeStartNanos = System.nanoTime(); + final SearchTimeProvider timeProvider = + new SearchTimeProvider(absoluteStartMillis, relativeStartNanos, System::nanoTime); + final String[] localIndices; final Map> remoteClusterIndices; final ClusterState clusterState = clusterService.state(); @@ -134,7 +187,7 @@ public class TransportSearchAction extends HandledTransportAction null, clusterState, Collections.emptyMap(), listener); } else { remoteClusterService.collectSearchShards(searchRequest, remoteClusterIndices, @@ -143,13 +196,13 @@ public class TransportSearchAction extends HandledTransportAction remoteAliasFilters = new HashMap<>(); Function connectionFunction = remoteClusterService.processRemoteShards( searchShardsResponses, remoteShardIterators, remoteAliasFilters); - executeSearch((SearchTask)task, startTimeInMillis, searchRequest, localIndices, remoteShardIterators, + executeSearch((SearchTask)task, timeProvider, searchRequest, localIndices, remoteShardIterators, connectionFunction, clusterState, remoteAliasFilters, listener); }, listener::onFailure)); } } - private void executeSearch(SearchTask task, long startTimeInMillis, SearchRequest searchRequest, String[] localIndices, + private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, SearchRequest searchRequest, String[] localIndices, List remoteShardIterators, Function remoteConnections, ClusterState clusterState, Map remoteAliasMap, ActionListener listener) { @@ -163,7 +216,7 @@ public class TransportSearchAction extends HandledTransportAction aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), @@ -211,7 +264,7 @@ public class TransportSearchAction extends HandledTransportAction connectionLookup, + SearchTimeProvider timeProvider, Function connectionLookup, long clusterStateVersion, Map aliasFilter, Map concreteIndexBoosts, ActionListener listener) { @@ -245,12 +298,12 @@ public class TransportSearchAction extends HandledTransportAction extends BaseFuture implements try { return get(); } catch (InterruptedException e) { + Thread.currentThread().interrupt(); throw new IllegalStateException("Future got interrupted", e); } catch (ExecutionException e) { throw rethrowExecutionException(e); @@ -66,6 +67,7 @@ public abstract class AdapterActionFuture extends BaseFuture implements } catch (TimeoutException e) { throw new ElasticsearchTimeoutException(e); } catch (InterruptedException e) { + Thread.currentThread().interrupt(); throw new IllegalStateException("Future got interrupted", e); } catch (ExecutionException e) { throw rethrowExecutionException(e); @@ -100,4 +102,5 @@ public abstract class AdapterActionFuture extends BaseFuture implements } protected abstract T convert(L listenerResponse); + } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java 
b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 231b9ac6d03..7e82852a9f3 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -48,16 +48,21 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; /** - * We enforce bootstrap checks once a node has the transport protocol bound to a non-loopback interface. In this case we assume the node is - * running in production and all bootstrap checks must pass. + * We enforce bootstrap checks once a node has the transport protocol bound to a non-loopback interface or if the system property {@code + * es.enforce.bootstrap.checks} is set to {@code true}. In this case we assume the node is running in production and all bootstrap checks must + * pass. */ final class BootstrapChecks { private BootstrapChecks() { } + static final String ES_ENFORCE_BOOTSTRAP_CHECKS = "es.enforce.bootstrap.checks"; + /** - * Executes the bootstrap checks if the node has the transport protocol bound to a non-loopback interface. + * Executes the bootstrap checks if the node has the transport protocol bound to a non-loopback interface. If the system property + * {@code es.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether or not + * the transport protocol is bound to a non-loopback interface. * * @param settings the current node settings * @param boundTransportAddress the node network bindings @@ -74,7 +79,9 @@ final class BootstrapChecks { } /** - * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. + * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system + * property {@code es.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether + * or not the transport protocol is bound to a non-loopback interface. * * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned * @param checks the checks to execute @@ -88,7 +95,9 @@ final class BootstrapChecks { } /** - * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. + * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system + * property {@code es.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether + * or not the transport protocol is bound to a non-loopback interface.
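The new property is only honored when set to the literal string `true`; any other non-null value is rejected with the `IllegalArgumentException` shown below. Assuming the usual `ES_JAVA_OPTS` mechanism for passing JVM system properties to Elasticsearch, enforcement on a loopback-bound development node might be enabled like this:

```sh
# Hypothetical invocation: force bootstrap checks on a node bound to loopback.
# Any value other than the literal "true" causes startup to fail.
ES_JAVA_OPTS="-Des.enforce.bootstrap.checks=true" ./bin/elasticsearch
```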
* * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned * @param checks the checks to execute @@ -101,13 +110,31 @@ final class BootstrapChecks { final List errors = new ArrayList<>(); final List ignoredErrors = new ArrayList<>(); + final String esEnforceBootstrapChecks = System.getProperty(ES_ENFORCE_BOOTSTRAP_CHECKS); + final boolean enforceBootstrapChecks; + if (esEnforceBootstrapChecks == null) { + enforceBootstrapChecks = false; + } else if (Boolean.TRUE.toString().equals(esEnforceBootstrapChecks)) { + enforceBootstrapChecks = true; + } else { + final String message = + String.format( + Locale.ROOT, + "[%s] must be [true] but was [%s]", + ES_ENFORCE_BOOTSTRAP_CHECKS, + esEnforceBootstrapChecks); + throw new IllegalArgumentException(message); + } + if (enforceLimits) { logger.info("bound or publishing to a non-loopback or non-link-local address, enforcing bootstrap checks"); + } else if (enforceBootstrapChecks) { + logger.info("explicitly enforcing bootstrap checks"); } for (final BootstrapCheck check : checks) { if (check.check()) { - if (!enforceLimits && !check.alwaysEnforce()) { + if (!(enforceLimits || enforceBootstrapChecks) && !check.alwaysEnforce()) { ignoredErrors.add(check.errorMessage()); } else { errors.add(check.errorMessage()); @@ -127,7 +154,6 @@ final class BootstrapChecks { errors.stream().map(IllegalStateException::new).forEach(ne::addSuppressed); throw ne; } - } static void log(final Logger logger, final String error) { @@ -140,9 +166,9 @@ final class BootstrapChecks { * @param boundTransportAddress the node network bindings * @return {@code true} if the checks should be enforced */ - static boolean enforceLimits(BoundTransportAddress boundTransportAddress) { - Predicate isLoopbackOrLinkLocalAddress = t -> t.address().getAddress().isLinkLocalAddress() - || t.address().getAddress().isLoopbackAddress(); + static boolean enforceLimits(final BoundTransportAddress boundTransportAddress) { + Predicate isLoopbackOrLinkLocalAddress = + t -> t.address().getAddress().isLinkLocalAddress() || t.address().getAddress().isLoopbackAddress(); return !(Arrays.stream(boundTransportAddress.boundAddresses()).allMatch(isLoopbackOrLinkLocalAddress) && isLoopbackOrLinkLocalAddress.test(boundTransportAddress.publishAddress())); } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java b/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java index e8538daec56..74fa7e0c1d5 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java @@ -31,6 +31,7 @@ import java.security.PermissionCollection; import java.security.Permissions; import java.security.Policy; import java.security.ProtectionDomain; +import java.util.Collections; import java.util.Map; import java.util.function.Predicate; @@ -50,7 +51,7 @@ final class ESPolicy extends Policy { ESPolicy(PermissionCollection dynamic, Map plugins, boolean filterBadDefaults) { this.template = Security.readPolicy(getClass().getResource(POLICY_RESOURCE), JarHell.parseClassPath()); - this.untrusted = Security.readPolicy(getClass().getResource(UNTRUSTED_RESOURCE), new URL[0]); + this.untrusted = Security.readPolicy(getClass().getResource(UNTRUSTED_RESOURCE), Collections.emptySet()); if (filterBadDefaults) { this.system = new SystemPolicy(Policy.getPolicy()); } else { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java index 
22ba936d903..c5346bf243d 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java @@ -36,9 +36,11 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.Arrays; +import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.Locale; import java.util.Map; import java.util.Set; @@ -93,7 +95,7 @@ public class JarHell { * @return array of URLs * @throws IllegalStateException if the classpath contains empty elements */ - public static URL[] parseClassPath() { + public static Set parseClassPath() { return parseClassPath(System.getProperty("java.class.path")); } @@ -104,13 +106,12 @@ public class JarHell { * @throws IllegalStateException if the classpath contains empty elements */ @SuppressForbidden(reason = "resolves against CWD because that is how classpaths work") - static URL[] parseClassPath(String classPath) { + static Set parseClassPath(String classPath) { String pathSeparator = System.getProperty("path.separator"); String fileSeparator = System.getProperty("file.separator"); String elements[] = classPath.split(pathSeparator); - URL urlElements[] = new URL[elements.length]; - for (int i = 0; i < elements.length; i++) { - String element = elements[i]; + Set urlElements = new LinkedHashSet<>(); // order is already lost, but some filesystems have it + for (String element : elements) { // Technically empty classpath element behaves like CWD. // So below is the "correct" code, however in practice with ES, this is usually just a misconfiguration, // from old shell scripts left behind or something: @@ -136,13 +137,17 @@ public class JarHell { } // now just parse as ordinary file try { - urlElements[i] = PathUtils.get(element).toUri().toURL(); + URL url = PathUtils.get(element).toUri().toURL(); + if (urlElements.add(url) == false) { + throw new IllegalStateException("jar hell!" + System.lineSeparator() + + "duplicate jar on classpath: " + classPath); + } } catch (MalformedURLException e) { // should not happen, as we use the filesystem API throw new RuntimeException(e); } } - return urlElements; + return Collections.unmodifiableSet(urlElements); } /** @@ -150,7 +155,7 @@ public class JarHell { * @throws IllegalStateException if jar hell was found */ @SuppressForbidden(reason = "needs JarFile for speed, just reading entries") - public static void checkJarHell(URL urls[]) throws URISyntaxException, IOException { + public static void checkJarHell(Set urls) throws URISyntaxException, IOException { Logger logger = Loggers.getLogger(JarHell.class); // we don't try to be sneaky and use deprecated/internal/not portable stuff // like sun.boot.class.path, and with jigsaw we don't yet have a way to get @@ -168,8 +173,8 @@ public class JarHell { } if (path.toString().endsWith(".jar")) { if (!seenJars.add(path)) { - logger.debug("excluding duplicate classpath element: {}", path); - continue; + throw new IllegalStateException("jar hell!" 
+ System.lineSeparator() + + "duplicate jar on classpath: " + path); } logger.debug("examining jar: {}", path); try (JarFile file = new JarFile(path.toString())) { @@ -198,8 +203,8 @@ public class JarHell { public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String entry = root.relativize(file).toString(); if (entry.endsWith(".class")) { - // normalize with the os separator - entry = entry.replace(sep, ".").substring(0, entry.length() - 6); + // normalize with the os separator, remove '.class' + entry = entry.replace(sep, ".").substring(0, entry.length() - ".class".length()); checkClass(clazzes, entry, path); } return super.visitFile(file, attrs); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index 3b59f235b1c..de16bbe76aa 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -48,8 +48,10 @@ import java.security.URIParameter; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Set; /** * Initializes SecurityManager with necessary permissions. @@ -127,19 +129,23 @@ final class Security { @SuppressForbidden(reason = "proper use of URL") static Map getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException { Map map = new HashMap<>(); - // collect up lists of plugins and modules - List pluginsAndModules = new ArrayList<>(); + // collect up set of plugins and modules by listing directories. + Set pluginsAndModules = new LinkedHashSet<>(); // order is already lost, but some filesystems have it if (Files.exists(environment.pluginsFile())) { try (DirectoryStream stream = Files.newDirectoryStream(environment.pluginsFile())) { for (Path plugin : stream) { - pluginsAndModules.add(plugin); + if (pluginsAndModules.add(plugin) == false) { + throw new IllegalStateException("duplicate plugin: " + plugin); + } } } } if (Files.exists(environment.modulesFile())) { try (DirectoryStream stream = Files.newDirectoryStream(environment.modulesFile())) { - for (Path plugin : stream) { - pluginsAndModules.add(plugin); + for (Path module : stream) { + if (pluginsAndModules.add(module) == false) { + throw new IllegalStateException("duplicate module: " + module); + } } } } @@ -149,15 +155,18 @@ final class Security { if (Files.exists(policyFile)) { // first get a list of URLs for the plugins' jars: // we resolve symlinks so map is keyed on the normalize codebase name - List codebases = new ArrayList<>(); + Set codebases = new LinkedHashSet<>(); // order is already lost, but some filesystems have it try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { for (Path jar : jarStream) { - codebases.add(jar.toRealPath().toUri().toURL()); + URL url = jar.toRealPath().toUri().toURL(); + if (codebases.add(url) == false) { + throw new IllegalStateException("duplicate module/plugin: " + url); + } } } // parse the plugin's policy file into a set of permissions - Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()])); + Policy policy = readPolicy(policyFile.toUri().toURL(), codebases); // consult this policy for each of the plugin's jars: for (URL url : codebases) { @@ -175,24 +184,33 @@ final class Security { /** * Reads and returns the specified {@code policyFile}. *
* <p>
- * Resources (e.g. jar files and directories) listed in {@code codebases} location - * will be provided to the policy file via a system property of the short name: - * e.g. ${codebase.joda-convert-1.2.jar} would map to full URL. + * Jar files listed in {@code codebases} location will be provided to the policy file via + * a system property of the short name: e.g. ${codebase.joda-convert-1.2.jar} + * would map to full URL. */ @SuppressForbidden(reason = "accesses fully qualified URLs to configure security") - static Policy readPolicy(URL policyFile, URL codebases[]) { + static Policy readPolicy(URL policyFile, Set codebases) { try { try { // set codebase properties for (URL url : codebases) { String shortName = PathUtils.get(url.toURI()).getFileName().toString(); - System.setProperty("codebase." + shortName, url.toString()); + if (shortName.endsWith(".jar") == false) { + continue; // tests :( + } + String previous = System.setProperty("codebase." + shortName, url.toString()); + if (previous != null) { + throw new IllegalStateException("codebase property already set: " + shortName + "->" + previous); + } } return Policy.getInstance("JavaPolicy", new URIParameter(policyFile.toURI())); } finally { // clear codebase properties for (URL url : codebases) { String shortName = PathUtils.get(url.toURI()).getFileName().toString(); + if (shortName.endsWith(".jar") == false) { + continue; // tests :( + } System.clearProperty("codebase." + shortName); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index c9a492534ac..5ef06f178a8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -66,24 +66,24 @@ import java.util.Set; /** * Represents the current state of the cluster. *
* <p>
- * The cluster state object is immutable with an exception of the {@link RoutingNodes} structure, which is + * The cluster state object is immutable with the exception of the {@link RoutingNodes} structure, which is * built on demand from the {@link RoutingTable}. * The cluster state can be updated only on the master node. All updates are performed on a * single thread and controlled by the {@link ClusterService}. After every update the - * {@link Discovery#publish} method publishes new version of the cluster state to all other nodes in the + * {@link Discovery#publish} method publishes a new version of the cluster state to all other nodes in the * cluster. The actual publishing mechanism is delegated to the {@link Discovery#publish} method and depends on * the type of discovery. In the Zen Discovery it is handled in the {@link PublishClusterStateAction#publish} method. The * publishing mechanism can be overridden by other discovery. *
* <p>
* The cluster state implements the {@link Diffable} interface in order to support publishing of cluster state * differences instead of the entire state on each change. The publishing mechanism should only send differences - * to a node if this node was present in the previous version of the cluster state. If a node is not present was - * not present in the previous version of the cluster state, such node is unlikely to have the previous cluster - * state version and should be sent a complete version. In order to make sure that the differences are applied to + * to a node if this node was present in the previous version of the cluster state. If a node was + * not present in the previous version of the cluster state, this node is unlikely to have the previous cluster + * state version and should be sent a complete version. In order to make sure that the differences are applied to the * correct version of the cluster state, each cluster state version update generates {@link #stateUUID} that uniquely * identifies this version of the state. This uuid is verified by the {@link ClusterStateDiff#apply} method to - * makes sure that the correct diffs are applied. If uuids don’t match, the {@link ClusterStateDiff#apply} method - * throws the {@link IncompatibleClusterStateVersionException}, which should cause the publishing mechanism to send + * make sure that the correct diffs are applied. If uuids don’t match, the {@link ClusterStateDiff#apply} method + * throws the {@link IncompatibleClusterStateVersionException}, which causes the publishing mechanism to send * a full version of the cluster state to the node on which this exception was thrown. */ public class ClusterState implements ToXContent, Diffable { @@ -252,8 +252,8 @@ public class ClusterState implements ToXContent, Diffable { } /** - * a cluster state supersedes another state iff they are from the same master and the version this state is higher than the other - * state. + * a cluster state supersedes another state if they are from the same master and the version of this state is higher than that of the + * other state. *
* <p>
* In essence that means that all the changes from the other cluster state are also reflected by the current one */ diff --git a/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index 55e70dbe644..8f6527ccaa7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/core/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -89,6 +89,18 @@ public class RestoreInProgress extends AbstractNamedDiffable implements return entries.hashCode(); } + @Override + public String toString() { + StringBuilder builder = new StringBuilder("RestoreInProgress["); + for (int i = 0; i < entries.size(); i++) { + builder.append(entries.get(i).snapshot().getSnapshotId().getName()); + if (i + 1 < entries.size()) { + builder.append(","); + } + } + return builder.append("]").toString(); + } + /** * Restore metadata */ diff --git a/core/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java b/core/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java index b3ab12fe21a..446f4ae0741 100644 --- a/core/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java +++ b/core/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java @@ -150,6 +150,18 @@ public class SnapshotDeletionsInProgress extends AbstractNamedDiffable i return builder; } + @Override + public String toString() { + StringBuilder builder = new StringBuilder("SnapshotDeletionsInProgress["); + for (int i = 0; i < entries.size(); i++) { + builder.append(entries.get(i).getSnapshot().getSnapshotId().getName()); + if (i + 1 < entries.size()) { + builder.append(","); + } + } + return builder.append("]").toString(); + } + /** * A class representing a snapshot deletion request entry in the cluster state. */ diff --git a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 0ac1e8e4090..1e1b61281b4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -70,6 +70,18 @@ public class SnapshotsInProgress extends AbstractNamedDiffable implement return entries.hashCode(); } + @Override + public String toString() { + StringBuilder builder = new StringBuilder("SnapshotsInProgress["); + for (int i = 0; i < entries.size(); i++) { + builder.append(entries.get(i).snapshot().getSnapshotId().getName()); + if (i + 1 < entries.size()) { + builder.append(","); + } + } + return builder.append("]").toString(); + } + public static class Entry { private final State state; private final Snapshot snapshot; diff --git a/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java index 4c2f3a6d48a..16757187196 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java @@ -104,6 +104,13 @@ public abstract class SecureSetting extends Setting { // TODO: override toXContent + /** + * Overrides the diff operation to make this a no-op for secure settings as they shouldn't be returned in a diff + */ + @Override + public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) { + } + /** * A setting which contains a sensitive string. 
* diff --git a/core/src/main/java/org/elasticsearch/common/settings/SecureString.java b/core/src/main/java/org/elasticsearch/common/settings/SecureString.java index 36982ddde1c..85c4c566db1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SecureString.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SecureString.java @@ -106,8 +106,39 @@ public final class SecureString implements CharSequence, Closeable { */ @Override public synchronized void close() { - Arrays.fill(chars, '\0'); - chars = null; + if (chars != null) { + Arrays.fill(chars, '\0'); + chars = null; + } + } + + /** + * Returns a new copy of this object that is backed by its own char array. Closing the new instance has no effect on the instance it + * was created from. This is useful for APIs which accept a char array and you want to be safe about the API potentially modifying the + * char array. For example: + * + *
+     * <pre>
+     *     try (SecureString copy = secureString.clone()) {
+     *         // pass the char[] to an external API
+     *         PasswordAuthentication auth = new PasswordAuthentication(username, copy.getChars());
+     *         ...
+     *     }
+     * </pre>
+ */ + @Override + public synchronized SecureString clone() { + ensureNotClosed(); + return new SecureString(Arrays.copyOf(chars, chars.length)); + } + + /** + * Returns the underlying char[]. This is a dangerous operation as the array may be modified while it is being used by other threads + * or a consumer may modify the values in the array. For safety, it is preferable to use {@link #clone()} and pass its chars to the + * consumer when the chars are needed multiple times. + */ + public synchronized char[] getChars() { + ensureNotClosed(); + return chars; } /** Throw an exception if this string has been closed, indicating something is trying to access the data after being closed. */ diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index 6658913cce6..f6f5bafa275 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -65,6 +65,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -214,6 +215,9 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { final List> futures = executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); final List discoveryNodes = new ArrayList<>(); + final Set localAddresses = new HashSet<>(); + localAddresses.add(transportService.boundAddress().publishAddress()); + localAddresses.addAll(Arrays.asList(transportService.boundAddress().boundAddresses())); // ExecutorService#invokeAll guarantees that the futures are returned in the iteration order of the tasks so we can associate the // hostname with the corresponding task by iterating together final Iterator it = hosts.iterator(); @@ -225,13 +229,17 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { final TransportAddress[] addresses = future.get(); logger.trace("resolved host [{}] to {}", hostname, addresses); for (int addressId = 0; addressId < addresses.length; addressId++) { - discoveryNodes.add( - new DiscoveryNode( - nodeId_prefix + hostname + "_" + addressId + "#", - addresses[addressId], - emptyMap(), - emptySet(), - Version.CURRENT.minimumCompatibilityVersion())); + final TransportAddress address = addresses[addressId]; + // no point in pinging ourselves + if (localAddresses.contains(address) == false) { + discoveryNodes.add( + new DiscoveryNode( + nodeId_prefix + hostname + "_" + addressId + "#", + address, + emptyMap(), + emptySet(), + Version.CURRENT.minimumCompatibilityVersion())); + } } } catch (final ExecutionException e) { assert e.getCause() != null; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index a8c74101bff..8b7355dca4b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -257,11 +257,6 @@ public class DocumentMapper implements ToXContent { return this.objectMappers; } - // TODO this method looks like it is only used in tests... 
- public ParsedDocument parse(String index, String type, String id, BytesReference source) throws MapperParsingException { - return parse(SourceToParse.source(index, type, id, source, XContentType.JSON)); - } - public ParsedDocument parse(SourceToParse source) throws MapperParsingException { return documentParser.parseDocument(source); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index aeab1e5c0cf..68983bcf63f 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -110,7 +110,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { private volatile Map mappers = emptyMap(); private volatile FieldTypeLookup fieldTypes; - private volatile Map fullPathObjectMappers = new HashMap<>(); + private volatile Map fullPathObjectMappers = emptyMap(); private boolean hasNested = false; // updated dynamically to true when a nested object is added private boolean allEnabled = false; // updated dynamically to true when _all is enabled @@ -394,6 +394,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { for (ObjectMapper objectMapper : objectMappers) { if (fullPathObjectMappers == this.fullPathObjectMappers) { + // first time through the loops fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers); } fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper); @@ -414,6 +415,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { if (oldMapper == null && newMapper.parentFieldMapper().active()) { if (parentTypes == this.parentTypes) { + // first time through the loop parentTypes = new HashSet<>(this.parentTypes); } parentTypes.add(mapper.parentFieldMapper().type()); @@ -456,8 +458,15 @@ public class MapperService extends AbstractIndexComponent implements Closeable { // make structures immutable mappers = Collections.unmodifiableMap(mappers); results = Collections.unmodifiableMap(results); - parentTypes = Collections.unmodifiableSet(parentTypes); - fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); + + // only need to immutably rewrap these if the previous reference was changed. + // if not then they are already implicitly immutable. + if (fullPathObjectMappers != this.fullPathObjectMappers) { + fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); + } + if (parentTypes != this.parentTypes) { + parentTypes = Collections.unmodifiableSet(parentTypes); + } // commit the change if (defaultMappingSource != null) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 8920207778e..62ff8bdede0 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -265,11 +265,16 @@ public class ScaledFloatFieldMapper extends FieldMapper { if (stats == null) { return null; } - return new FieldStats.Double(stats.getMaxDoc(), stats.getDocCount(), + if (stats.hasMinMax()) { + return new FieldStats.Double(stats.getMaxDoc(), stats.getDocCount(), stats.getSumDocFreq(), stats.getSumTotalTermFreq(), stats.isSearchable(), stats.isAggregatable(), - stats.getMinValue() == null ? 
null : stats.getMinValue() / scalingFactor, - stats.getMaxValue() == null ? null : stats.getMaxValue() / scalingFactor); + stats.getMinValue() / scalingFactor, + stats.getMaxValue() / scalingFactor); + } + return new FieldStats.Double(stats.getMaxDoc(), stats.getDocCount(), + stats.getSumDocFreq(), stats.getSumTotalTermFreq(), + stats.isSearchable(), stats.isAggregatable()); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index 9c7c95b7211..51516c3dded 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -263,7 +263,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref } } - /** * Renames all the given files from the key of the map to the * value of the map. All successfully renamed files are removed from the map in-place. @@ -389,7 +388,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref } } - /** * Reads a MetadataSnapshot from the given index locations or returns an empty snapshot if it can't be read. * @@ -597,7 +595,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref /** * This method deletes every file in this store that is not contained in the given source meta data or is a * legacy checksum file. After the delete it pulls the latest metadata snapshot from the store and compares it - * to the given snapshot. If the snapshots are inconsistent an illegal state exception is thrown + * to the given snapshot. If the snapshots are inconsistent an illegal state exception is thrown. * * @param reason the reason for this cleanup operation logged for each deleted file * @param sourceMetaData the metadata used for cleanup. all files in this metadata should be kept around. @@ -641,9 +639,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref for (StoreFileMetaData meta : recoveryDiff.different) { StoreFileMetaData local = targetMetaData.get(meta.name()); StoreFileMetaData remote = sourceMetaData.get(meta.name()); - // if we have different files the they must have no checksums otherwise something went wrong during recovery. - // we have that problem when we have an empty index is only a segments_1 file then we can't tell if it's a Lucene 4.8 file - // and therefore no checksum. That isn't much of a problem since we simply copy it over anyway but those files come out as + // if we have different files then they must have no checksums; otherwise something went wrong during recovery. + // we have that problem when we have an empty index that has only a segments_1 file so we can't tell if it's a Lucene 4.8 file + // and therefore no checksum is included. That isn't a problem since we simply copy it over anyway but those files come out as + // different in the diff. That's why we have to double check here again if the rest of it matches.
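Stepping back to the `ScaledFloatFieldMapper` hunk above: a `scaled_float` is stored as a long by rounding `value * scalingFactor`, so field stats are decoded by dividing the raw min/max by the factor, and now only when `hasMinMax()` reports that those values exist. A small illustrative round trip (made-up values, not the mapper's actual code):

```java
public class ScaledFloatRoundTrip {
    public static void main(String[] args) {
        double scalingFactor = 100.0; // two decimal places of precision
        double value = 12.34;

        // index time: the double is encoded as a long
        long encoded = Math.round(value * scalingFactor); // 1234

        // stats time: decode by dividing by the factor, as in the hunk above
        double decoded = encoded / scalingFactor; // 12.34

        System.out.println(encoded + " -> " + decoded);
    }
}
```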
// all is fine this file is just part of a commit or a segment that is different @@ -676,7 +674,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref this.deletesLogger = deletesLogger; } - @Override public void close() throws IOException { assert false : "Nobody should close this directory except of the Store itself"; diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java index 7bb1e51cd23..e20eb42427f 100644 --- a/core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java @@ -138,8 +138,8 @@ public class FsInfo implements Iterable, Writeable, ToXContent { public void add(Path path) { total = FsProbe.adjustForHugeFilesystems(addLong(total, path.total)); - free = addLong(free, path.free); - available = addLong(available, path.available); + free = FsProbe.adjustForHugeFilesystems(addLong(free, path.free)); + available = FsProbe.adjustForHugeFilesystems(addLong(available, path.available)); if (path.spins != null && path.spins.booleanValue()) { // Spinning is contagious! spins = Boolean.TRUE; diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index be78dd927ff..7ab9996a709 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -105,6 +105,13 @@ class InstallPluginCommand extends EnvironmentAwareCommand { private static final String PROPERTY_STAGING_ID = "es.plugins.staging"; + // exit codes for install + /** A plugin with the same name is already installed. */ + static final int PLUGIN_EXISTS = 1; + /** The plugin zip is not properly structured. */ + static final int PLUGIN_MALFORMED = 2; + + /** The builtin modules, which are plugins, but cannot be installed or removed. */ static final Set MODULES; static { @@ -333,7 +340,8 @@ class InstallPluginCommand extends EnvironmentAwareCommand { byte[] zipbytes = Files.readAllBytes(zip); String gotChecksum = MessageDigests.toHexString(MessageDigests.sha1().digest(zipbytes)); if (expectedChecksum.equals(gotChecksum) == false) { - throw new UserException(ExitCodes.IO_ERROR, "SHA1 mismatch, expected " + expectedChecksum + " but got " + gotChecksum); + throw new UserException(ExitCodes.IO_ERROR, + "SHA1 mismatch, expected " + expectedChecksum + " but got " + gotChecksum); } return zip; @@ -357,12 +365,14 @@ class InstallPluginCommand extends EnvironmentAwareCommand { hasEsDir = true; Path targetFile = target.resolve(entry.getName().substring("elasticsearch/".length())); - // Using the entry name as a path can result in an entry outside of the plugin dir, either if the - // name starts with the root of the filesystem, or it is a relative entry like ../whatever. - // This check attempts to identify both cases by first normalizing the path (which removes foo/..) - // and ensuring the normalized entry is still rooted with the target plugin directory. + // Using the entry name as a path can result in an entry outside of the plugin dir, + // either if the name starts with the root of the filesystem, or it is a relative + // entry like ../whatever. This check attempts to identify both cases by first + // normalizing the path (which removes foo/..) and ensuring the normalized entry + // is still rooted with the target plugin directory. 
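The comment above describes a standard zip-slip guard, and the check itself follows immediately below. As a self-contained sketch of the same idea (hypothetical paths, not the installer's code):

```java
import java.nio.file.Path;
import java.nio.file.Paths;

public class ZipSlipCheckDemo {
    public static void main(String[] args) {
        Path target = Paths.get("/plugins/analysis-foo"); // hypothetical plugin dir
        Path entry = target.resolve("../../etc/passwd");  // relative escape attempt

        // normalize() collapses "foo/.."; the entry is only safe if the
        // normalized path is still rooted under the target directory
        boolean safe = entry.normalize().startsWith(target);
        System.out.println(entry.normalize() + " safe=" + safe); // /etc/passwd safe=false
    }
}
```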
if (targetFile.normalize().startsWith(target) == false) { - throw new IOException("Zip contains entry name '" + entry.getName() + "' resolving outside of plugin directory"); + throw new UserException(PLUGIN_MALFORMED, "Zip contains entry name '" + + entry.getName() + "' resolving outside of plugin directory"); } // be on the safe side: do not rely on that directories are always extracted @@ -384,7 +394,8 @@ class InstallPluginCommand extends EnvironmentAwareCommand { Files.delete(zip); if (hasEsDir == false) { IOUtils.rm(target); - throw new UserException(ExitCodes.DATA_ERROR, "`elasticsearch` directory is missing in the plugin zip"); + throw new UserException(PLUGIN_MALFORMED, + "`elasticsearch` directory is missing in the plugin zip"); } return target; } @@ -424,10 +435,11 @@ class InstallPluginCommand extends EnvironmentAwareCommand { if (Files.exists(destination)) { final String message = String.format( Locale.ROOT, - "plugin directory [%s] already exists; if you need to update the plugin, uninstall it first using command 'remove %s'", + "plugin directory [%s] already exists; if you need to update the plugin, " + + "uninstall it first using command 'remove %s'", destination.toAbsolutePath(), info.getName()); - throw new UserException(ExitCodes.CONFIG, message); + throw new UserException(PLUGIN_EXISTS, message); } terminal.println(VERBOSE, info.toString()); @@ -435,8 +447,8 @@ class InstallPluginCommand extends EnvironmentAwareCommand { // don't let user install plugin as a module... // they might be unavoidably in maven central and are packaged up the same way) if (MODULES.contains(info.getName())) { - throw new UserException( - ExitCodes.USAGE, "plugin '" + info.getName() + "' cannot be installed like this, it is a system module"); + throw new UserException(ExitCodes.USAGE, "plugin '" + info.getName() + + "' cannot be installed like this, it is a system module"); } // check for jar hell before any copying @@ -455,8 +467,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand { /** check a candidate plugin for jar hell before installing it */ void jarHellCheck(Path candidate, Path pluginsDir) throws Exception { // create list of current jars in classpath - final List jars = new ArrayList<>(); - jars.addAll(Arrays.asList(JarHell.parseClassPath())); + final Set jars = new HashSet<>(JarHell.parseClassPath()); // read existing bundles. this does some checks on the installation too. PluginsService.getPluginBundles(pluginsDir); @@ -464,13 +475,15 @@ class InstallPluginCommand extends EnvironmentAwareCommand { // add plugin jars to the list Path pluginJars[] = FileSystemUtils.files(candidate, "*.jar"); for (Path jar : pluginJars) { - jars.add(jar.toUri().toURL()); + if (jars.add(jar.toUri().toURL()) == false) { + throw new IllegalStateException("jar hell! duplicate plugin jar: " + jar); + } } // TODO: no jars should be an error // TODO: verify the classname exists in one of the jars! // check combined (current classpath + new jars to-be-added) - JarHell.checkJarHell(jars.toArray(new URL[jars.size()])); + JarHell.checkJarHell(jars); } /** @@ -533,7 +546,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand { /** Copies the files from {@code tmpBinDir} into {@code destBinDir}, along with permissions from dest dirs parent. 
*/ private void installBin(PluginInfo info, Path tmpBinDir, Path destBinDir) throws Exception { if (Files.isDirectory(tmpBinDir) == false) { - throw new UserException(ExitCodes.IO_ERROR, "bin in plugin " + info.getName() + " is not a directory"); + throw new UserException(PLUGIN_MALFORMED, "bin in plugin " + info.getName() + " is not a directory"); } Files.createDirectory(destBinDir); setFileAttributes(destBinDir, BIN_DIR_PERMS); @@ -541,9 +554,8 @@ class InstallPluginCommand extends EnvironmentAwareCommand { try (DirectoryStream stream = Files.newDirectoryStream(tmpBinDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { - throw new UserException( - ExitCodes.DATA_ERROR, - "Directories not allowed in bin dir for plugin " + info.getName() + ", found " + srcFile.getFileName()); + throw new UserException(PLUGIN_MALFORMED, "Directories not allowed in bin dir " + + "for plugin " + info.getName() + ", found " + srcFile.getFileName()); } Path destFile = destBinDir.resolve(tmpBinDir.relativize(srcFile)); @@ -560,7 +572,8 @@ class InstallPluginCommand extends EnvironmentAwareCommand { */ private void installConfig(PluginInfo info, Path tmpConfigDir, Path destConfigDir) throws Exception { if (Files.isDirectory(tmpConfigDir) == false) { - throw new UserException(ExitCodes.IO_ERROR, "config in plugin " + info.getName() + " is not a directory"); + throw new UserException(PLUGIN_MALFORMED, + "config in plugin " + info.getName() + " is not a directory"); } Files.createDirectories(destConfigDir); @@ -576,7 +589,8 @@ class InstallPluginCommand extends EnvironmentAwareCommand { try (DirectoryStream stream = Files.newDirectoryStream(tmpConfigDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { - throw new UserException(ExitCodes.DATA_ERROR, "Directories not allowed in config dir for plugin " + info.getName()); + throw new UserException(PLUGIN_MALFORMED, + "Directories not allowed in config dir for plugin " + info.getName()); } Path destFile = destConfigDir.resolve(tmpConfigDir.relativize(srcFile)); diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index a674e7c6e24..1c6c4f17ff2 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -56,9 +56,17 @@ class ListPluginsCommand extends EnvironmentAwareCommand { } Collections.sort(plugins); for (final Path plugin : plugins) { - terminal.println(plugin.getFileName().toString()); - PluginInfo info = PluginInfo.readFromProperties(env.pluginsFile().resolve(plugin.toAbsolutePath())); - terminal.println(Terminal.Verbosity.VERBOSE, info.toString()); + terminal.println(Terminal.Verbosity.SILENT, plugin.getFileName().toString()); + try { + PluginInfo info = PluginInfo.readFromProperties(env.pluginsFile().resolve(plugin.toAbsolutePath())); + terminal.println(Terminal.Verbosity.VERBOSE, info.toString()); + } catch (IllegalArgumentException e) { + if (e.getMessage().contains("incompatible with Elasticsearch")) { + terminal.println("WARNING: " + e.getMessage()); + } else { + throw e; + } + } } } } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 7d988737058..9295c6c38d8 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -58,8 +58,10 @@ 
import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; @@ -107,16 +109,16 @@ public class PluginsService extends AbstractComponent { pluginsList.add(pluginInfo); } + Set seenBundles = new LinkedHashSet<>(); List modulesList = new ArrayList<>(); // load modules if (modulesDirectory != null) { try { - List bundles = getModuleBundles(modulesDirectory); - List> loaded = loadBundles(bundles); - pluginsLoaded.addAll(loaded); - for (Tuple module : loaded) { - modulesList.add(module.v1()); + Set modules = getModuleBundles(modulesDirectory); + for (Bundle bundle : modules) { + modulesList.add(bundle.plugin); } + seenBundles.addAll(modules); } catch (IOException ex) { throw new IllegalStateException("Unable to initialize modules", ex); } @@ -125,17 +127,19 @@ public class PluginsService extends AbstractComponent { // now, find all the ones that are in plugins/ if (pluginsDirectory != null) { try { - List bundles = getPluginBundles(pluginsDirectory); - List> loaded = loadBundles(bundles); - pluginsLoaded.addAll(loaded); - for (Tuple plugin : loaded) { - pluginsList.add(plugin.v1()); + Set plugins = getPluginBundles(pluginsDirectory); + for (Bundle bundle : plugins) { + pluginsList.add(bundle.plugin); } + seenBundles.addAll(plugins); } catch (IOException ex) { throw new IllegalStateException("Unable to initialize plugins", ex); } } + List> loaded = loadBundles(seenBundles); + pluginsLoaded.addAll(loaded); + this.info = new PluginsAndModules(pluginsList, modulesList); this.plugins = Collections.unmodifiableList(pluginsLoaded); @@ -234,48 +238,70 @@ public class PluginsService extends AbstractComponent { // a "bundle" is a group of plugins in a single classloader // really should be 1-1, but we are not so fortunate static class Bundle { - List plugins = new ArrayList<>(); - List urls = new ArrayList<>(); + final PluginInfo plugin; + final Set urls; + + Bundle(PluginInfo plugin, Set urls) { + this.plugin = Objects.requireNonNull(plugin); + this.urls = Objects.requireNonNull(urls); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Bundle bundle = (Bundle) o; + return Objects.equals(plugin, bundle.plugin); + } + + @Override + public int hashCode() { + return Objects.hash(plugin); + } } // similar in impl to getPluginBundles, but DO NOT try to make them share code. // we don't need to inherit all the leniency, and things are different enough. 
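The rewritten Bundle keys equality on its PluginInfo alone, which is what lets the LinkedHashSets used below reject duplicates while preserving discovery order. A minimal sketch of that idiom, using a plain String as a stand-in for PluginInfo (hypothetical names, not the real classes):

```java
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Objects;
import java.util.Set;

class BundleSketch {
    final String pluginName; // stand-in for PluginInfo
    final Set<String> urls;  // stand-in for the jar URL set

    BundleSketch(String pluginName, Set<String> urls) {
        this.pluginName = Objects.requireNonNull(pluginName);
        this.urls = Objects.requireNonNull(urls);
    }

    @Override
    public boolean equals(Object o) {
        return o instanceof BundleSketch
                && pluginName.equals(((BundleSketch) o).pluginName);
    }

    @Override
    public int hashCode() {
        return pluginName.hashCode();
    }

    public static void main(String[] args) {
        Set<BundleSketch> bundles = new LinkedHashSet<>();
        bundles.add(new BundleSketch("analysis-foo", Collections.singleton("a.jar")));
        // add() returns false for a second bundle naming the same plugin; the
        // duplicate checks below turn that signal into an IllegalStateException
        boolean added = bundles.add(
                new BundleSketch("analysis-foo", Collections.singleton("b.jar")));
        System.out.println(added); // false
    }
}
```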
- static List getModuleBundles(Path modulesDirectory) throws IOException { + static Set getModuleBundles(Path modulesDirectory) throws IOException { // damn leniency if (Files.notExists(modulesDirectory)) { - return Collections.emptyList(); + return Collections.emptySet(); } - List bundles = new ArrayList<>(); + Set bundles = new LinkedHashSet<>(); try (DirectoryStream stream = Files.newDirectoryStream(modulesDirectory)) { for (Path module : stream) { if (FileSystemUtils.isHidden(module)) { continue; // skip over .DS_Store etc } PluginInfo info = PluginInfo.readFromProperties(module); - Bundle bundle = new Bundle(); - bundle.plugins.add(info); + Set urls = new LinkedHashSet<>(); // gather urls for jar files try (DirectoryStream jarStream = Files.newDirectoryStream(module, "*.jar")) { for (Path jar : jarStream) { // normalize with toRealPath to get symlinks out of our hair - bundle.urls.add(jar.toRealPath().toUri().toURL()); + URL url = jar.toRealPath().toUri().toURL(); + if (urls.add(url) == false) { + throw new IllegalStateException("duplicate codebase: " + url); + } } } - bundles.add(bundle); + if (bundles.add(new Bundle(info, urls)) == false) { + throw new IllegalStateException("duplicate module: " + info); + } } } return bundles; } - static List getPluginBundles(Path pluginsDirectory) throws IOException { + static Set getPluginBundles(Path pluginsDirectory) throws IOException { Logger logger = Loggers.getLogger(PluginsService.class); // TODO: remove this leniency, but tests bogusly rely on it if (!isAccessibleDirectory(pluginsDirectory, logger)) { - return Collections.emptyList(); + return Collections.emptySet(); } - List bundles = new ArrayList<>(); + Set bundles = new LinkedHashSet<>(); try (DirectoryStream stream = Files.newDirectoryStream(pluginsDirectory)) { for (Path plugin : stream) { @@ -292,47 +318,58 @@ public class PluginsService extends AbstractComponent { + plugin.getFileName() + "]. Was the plugin built before 2.0?", e); } - List urls = new ArrayList<>(); + Set urls = new LinkedHashSet<>(); try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { for (Path jar : jarStream) { // normalize with toRealPath to get symlinks out of our hair - urls.add(jar.toRealPath().toUri().toURL()); + URL url = jar.toRealPath().toUri().toURL(); + if (urls.add(url) == false) { + throw new IllegalStateException("duplicate codebase: " + url); + } } } - final Bundle bundle = new Bundle(); - bundles.add(bundle); - bundle.plugins.add(info); - bundle.urls.addAll(urls); + if (bundles.add(new Bundle(info, urls)) == false) { + throw new IllegalStateException("duplicate plugin: " + info); + } } } return bundles; } - private List> loadBundles(List bundles) { + private List> loadBundles(Set bundles) { List> plugins = new ArrayList<>(); for (Bundle bundle : bundles) { // jar-hell check the bundle against the parent classloader // pluginmanager does it, but we do it again, in case lusers mess with jar files manually try { - final List jars = new ArrayList<>(); - jars.addAll(Arrays.asList(JarHell.parseClassPath())); - jars.addAll(bundle.urls); - JarHell.checkJarHell(jars.toArray(new URL[0])); + Set classpath = JarHell.parseClassPath(); + // check we don't have conflicting codebases + Set intersection = new HashSet<>(classpath); + intersection.retainAll(bundle.urls); + if (intersection.isEmpty() == false) { + throw new IllegalStateException("jar hell! 
duplicate codebases between" + + " plugin and core: " + intersection); + } + // check we don't have conflicting classes + Set union = new HashSet<>(classpath); + union.addAll(bundle.urls); + JarHell.checkJarHell(union); } catch (Exception e) { - throw new IllegalStateException("failed to load bundle " + bundle.urls + " due to jar hell", e); + throw new IllegalStateException("failed to load plugin " + bundle.plugin + + " due to jar hell", e); } - // create a child to load the plugins in this bundle - ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader()); - for (PluginInfo pluginInfo : bundle.plugins) { - // reload lucene SPI with any new services from the plugin - reloadLuceneSPI(loader); - final Class pluginClass = loadPluginClass(pluginInfo.getClassname(), loader); - final Plugin plugin = loadPlugin(pluginClass, settings); - plugins.add(new Tuple<>(pluginInfo, plugin)); - } + // create a child to load the plugin in this bundle + ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), + getClass().getClassLoader()); + // reload lucene SPI with any new services from the plugin + reloadLuceneSPI(loader); + final Class pluginClass = + loadPluginClass(bundle.plugin.getClassname(), loader); + final Plugin plugin = loadPlugin(pluginClass, settings); + plugins.add(new Tuple<>(bundle.plugin, plugin)); } return Collections.unmodifiableList(plugins); diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index 5219e50285c..8e81f97d84c 100644 --- a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -19,12 +19,14 @@ package org.elasticsearch.plugins; +import java.io.IOException; import java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import joptsimple.OptionSet; import joptsimple.OptionSpec; @@ -39,36 +41,51 @@ import org.elasticsearch.env.Environment; import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; /** - * A command for the plugin cli to remove a plugin from elasticsearch. + * A command for the plugin CLI to remove a plugin from Elasticsearch. */ class RemovePluginCommand extends EnvironmentAwareCommand { private final OptionSpec arguments; RemovePluginCommand() { - super("Removes a plugin from elasticsearch"); + super("removes a plugin from Elasticsearch"); this.arguments = parser.nonOptions("plugin name"); } @Override - protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { - String arg = arguments.value(options); - execute(terminal, arg, env); + protected void execute(final Terminal terminal, final OptionSet options, final Environment env) + throws Exception { + final String pluginName = arguments.value(options); + execute(terminal, pluginName, env); } - // pkg private for testing - void execute(Terminal terminal, String pluginName, Environment env) throws Exception { + /** + * Remove the plugin specified by {@code pluginName}. 
+ * + * @param terminal the terminal to use for input/output + * @param pluginName the name of the plugin to remove + * @param env the environment for the local node + * @throws IOException if any I/O exception occurs while performing a file operation + * @throws UserException if plugin name is null + * @throws UserException if plugin directory does not exist + * @throws UserException if the plugin bin directory is not a directory + */ + void execute(final Terminal terminal, final String pluginName, final Environment env) + throws IOException, UserException { if (pluginName == null) { throw new UserException(ExitCodes.USAGE, "plugin name is required"); } - terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "..."); + terminal.println("-> removing [" + Strings.coalesceToEmpty(pluginName) + "]..."); final Path pluginDir = env.pluginsFile().resolve(pluginName); if (Files.exists(pluginDir) == false) { - throw new UserException( - ExitCodes.CONFIG, - "plugin " + pluginName + " not found; run 'elasticsearch-plugin list' to get list of installed plugins"); + final String message = String.format( + Locale.ROOT, + "plugin [%s] not found; " + + "run 'elasticsearch-plugin list' to get list of installed plugins", + pluginName); + throw new UserException(ExitCodes.CONFIG, message); } final List pluginPaths = new ArrayList<>(); @@ -76,30 +93,41 @@ class RemovePluginCommand extends EnvironmentAwareCommand { final Path pluginBinDir = env.binFile().resolve(pluginName); if (Files.exists(pluginBinDir)) { if (Files.isDirectory(pluginBinDir) == false) { - throw new UserException(ExitCodes.IO_ERROR, "Bin dir for " + pluginName + " is not a directory"); + throw new UserException( + ExitCodes.IO_ERROR, "bin dir for " + pluginName + " is not a directory"); } pluginPaths.add(pluginBinDir); - terminal.println(VERBOSE, "Removing: " + pluginBinDir); + terminal.println(VERBOSE, "removing [" + pluginBinDir + "]"); } - terminal.println(VERBOSE, "Removing: " + pluginDir); + terminal.println(VERBOSE, "removing [" + pluginDir + "]"); final Path tmpPluginDir = env.pluginsFile().resolve(".removing-" + pluginName); try { Files.move(pluginDir, tmpPluginDir, StandardCopyOption.ATOMIC_MOVE); } catch (final AtomicMoveNotSupportedException e) { - // this can happen on a union filesystem when a plugin is not installed on the top layer; we fall back to a non-atomic move + /* + * On a union file system if the plugin that we are removing is not installed on the + * top layer then atomic move will not be supported. In this case, we fall back to a + * non-atomic move. + */ Files.move(pluginDir, tmpPluginDir); } pluginPaths.add(tmpPluginDir); IOUtils.rm(pluginPaths.toArray(new Path[pluginPaths.size()])); - // we preserve the config files in case the user is upgrading the plugin, but we print - // a message so the user knows in case they want to remove manually + /* + * We preserve the config files in case the user is upgrading the plugin, but we print a + * message so the user knows in case they want to remove manually. 
+ */ final Path pluginConfigDir = env.configFile().resolve(pluginName); if (Files.exists(pluginConfigDir)) { - terminal.println( - "-> Preserving plugin config files [" + pluginConfigDir + "] in case of upgrade, delete manually if not needed"); + final String message = String.format( + Locale.ROOT, + "-> preserving plugin config files [%s] in case of upgrade; " + + "delete manually if not needed", + pluginConfigDir); + terminal.println(message); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java index 6654deb76fb..1544a01f9f0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java @@ -81,7 +81,7 @@ public class RestClearIndicesCacheAction extends BaseRestHandler { if (Fields.QUERY.match(entry.getKey())) { clearIndicesCacheRequest.queryCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.queryCache())); } - if (Fields.REQUEST_CACHE.match(entry.getKey())) { + if (Fields.REQUEST.match(entry.getKey())) { clearIndicesCacheRequest.requestCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.requestCache())); } if (Fields.FIELD_DATA.match(entry.getKey())) { @@ -100,7 +100,7 @@ public class RestClearIndicesCacheAction extends BaseRestHandler { public static class Fields { public static final ParseField QUERY = new ParseField("query", "filter", "filter_cache"); - public static final ParseField REQUEST_CACHE = new ParseField("request_cache"); + public static final ParseField REQUEST = new ParseField("request", "request_cache"); public static final ParseField FIELD_DATA = new ParseField("field_data", "fielddata"); public static final ParseField RECYCLER = new ParseField("recycler"); public static final ParseField FIELDS = new ParseField("fields"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index d2f0f5ec3d2..0c2374045dd 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -62,6 +62,7 @@ public class RestValidateQueryAction extends BaseRestHandler { validateQueryRequest.explain(request.paramAsBoolean("explain", false)); validateQueryRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); validateQueryRequest.rewrite(request.paramAsBoolean("rewrite", false)); + validateQueryRequest.allShards(request.paramAsBoolean("all_shards", false)); Exception bodyParsingException = null; try { @@ -98,6 +99,9 @@ public class RestValidateQueryAction extends BaseRestHandler { if (explanation.getIndex() != null) { builder.field(INDEX_FIELD, explanation.getIndex()); } + if(explanation.getShard() >= 0) { + builder.field(SHARD_FIELD, explanation.getShard()); + } builder.field(VALID_FIELD, explanation.isValid()); if (explanation.getError() != null) { builder.field(ERROR_FIELD, explanation.getError()); @@ -132,6 +136,7 @@ public class RestValidateQueryAction extends BaseRestHandler { } private static final String INDEX_FIELD = "index"; + private static final String SHARD_FIELD = "shard"; private static final String VALID_FIELD = "valid"; private static final 
String EXPLANATIONS_FIELD = "explanations"; private static final String ERROR_FIELD = "error"; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index 563a958109b..4fb6a434c84 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -185,7 +185,7 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, Na builder.startObject(getName()); } if (this.metaData != null) { - builder.field(CommonFields.META); + builder.field(CommonFields.META.getPreferredName()); builder.map(this.metaData); } doXContentBody(builder, params); @@ -240,18 +240,17 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, Na * Common xcontent fields that are shared among addAggregation */ public static final class CommonFields extends ParseField.CommonFields { - // todo convert these to ParseField - public static final String META = "meta"; - public static final String BUCKETS = "buckets"; - public static final String VALUE = "value"; - public static final String VALUES = "values"; - public static final String VALUE_AS_STRING = "value_as_string"; - public static final String DOC_COUNT = "doc_count"; - public static final String KEY = "key"; - public static final String KEY_AS_STRING = "key_as_string"; - public static final String FROM = "from"; - public static final String FROM_AS_STRING = "from_as_string"; - public static final String TO = "to"; - public static final String TO_AS_STRING = "to_as_string"; + public static final ParseField META = new ParseField("meta"); + public static final ParseField BUCKETS = new ParseField("buckets"); + public static final ParseField VALUE = new ParseField("value"); + public static final ParseField VALUES = new ParseField("values"); + public static final ParseField VALUE_AS_STRING = new ParseField("value_as_string"); + public static final ParseField DOC_COUNT = new ParseField("doc_count"); + public static final ParseField KEY = new ParseField("key"); + public static final ParseField KEY_AS_STRING = new ParseField("key_as_string"); + public static final ParseField FROM = new ParseField("from"); + public static final ParseField FROM_AS_STRING = new ParseField("from_as_string"); + public static final ParseField TO = new ParseField("to"); + public static final ParseField TO_AS_STRING = new ParseField("to_as_string"); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java index 7ce66e4ae44..490c7a36878 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java @@ -131,7 +131,7 @@ public abstract class InternalSingleBucketAggregation extends InternalAggregatio @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(CommonFields.DOC_COUNT, docCount); + builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); return builder; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index 3a7dc284ab9..3d0839b7fb4 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -105,8 +105,8 @@ public class InternalAdjacencyMatrix @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(CommonFields.KEY, key); - builder.field(CommonFields.DOC_COUNT, docCount); + builder.field(CommonFields.KEY.getPreferredName(), key); + builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); return builder; @@ -207,7 +207,7 @@ public class InternalAdjacencyMatrix @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.startArray(CommonFields.BUCKETS); + builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (InternalBucket bucket : buckets) { bucket.toXContent(builder, params); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java index bd33f1608bc..51531222725 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java @@ -108,7 +108,7 @@ public class InternalFilters extends InternalMultiBucketAggregation, B exten protected int doHashCode() { return Objects.hash(super.doHashCode(), buckets, format, otherDocCount, showTermDocCountError, shardSize); } + + @Override + public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 3f7844683ca..0fb4ceea33a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -18,9 +18,11 @@ */ package org.elasticsearch.search.aggregations.bucket.terms; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregations; @@ -43,10 +45,11 @@ import static java.util.Collections.unmodifiableList; public abstract class InternalTerms, B extends InternalTerms.Bucket> extends InternalMultiBucketAggregation implements Terms, ToXContent { - protected static final String DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = "doc_count_error_upper_bound"; - protected static final String SUM_OF_OTHER_DOC_COUNTS = "sum_other_doc_count"; + protected static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new 
ParseField("doc_count_error_upper_bound"); + protected static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count"); public abstract static class Bucket> extends Terms.Bucket { + /** * Reads a bucket. Should be a constructor reference. */ @@ -141,6 +144,21 @@ public abstract class InternalTerms, B extends Int return newBucket(docCount, aggs, docCountError); } + @Override + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + keyToXContent(builder); + builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); + if (showDocCountError) { + builder.field(InternalTerms.DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError()); + } + aggregations.toXContentInternal(builder, params); + builder.endObject(); + return builder; + } + + protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; + @Override public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) { @@ -319,4 +337,16 @@ public abstract class InternalTerms, B extends Int protected int doHashCode() { return Objects.hash(minDocCount, order, requiredSize); } + + protected static XContentBuilder doXContentCommon(XContentBuilder builder, Params params, + long docCountError, long otherDocCount, List buckets) throws IOException { + builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), docCountError); + builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount); + builder.startArray(CommonFields.BUCKETS.getPreferredName()); + for (Bucket bucket : buckets) { + bucket.toXContent(builder, params); + } + builder.endArray(); + return builder; + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java index f3339de6738..98aa4825ee7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java @@ -86,18 +86,11 @@ public class LongTerms extends InternalMappedTerms } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CommonFields.KEY, term); + protected final XContentBuilder keyToXContent(XContentBuilder builder) throws IOException { + builder.field(CommonFields.KEY.getPreferredName(), term); if (format != DocValueFormat.RAW) { - builder.field(CommonFields.KEY_AS_STRING, format.format(term)); + builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term)); } - builder.field(CommonFields.DOC_COUNT, getDocCount()); - if (showDocCountError) { - builder.field(InternalTerms.DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME, getDocCountError()); - } - aggregations.toXContentInternal(builder, params); - builder.endObject(); return builder; } @@ -149,18 +142,6 @@ public class LongTerms extends InternalMappedTerms showTermDocCountError, otherDocCount, buckets, docCountError); } - @Override - public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(InternalTerms.DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME, docCountError); - builder.field(SUM_OF_OTHER_DOC_COUNTS, otherDocCount); - builder.startArray(CommonFields.BUCKETS); - for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); - } - builder.endArray(); - return 
builder; - } - @Override protected Bucket[] createBucketsArray(int size) { return new Bucket[size]; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java index 3fd41dc3aed..b48c443fac9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java @@ -85,16 +85,8 @@ public class StringTerms extends InternalMappedTerms SETTING_TV_HIGHLIGHT_MULTI_VALUE = + Setting.boolSetting("search.highlight.term_vector_multi_value", true, Setting.Property.NodeScope); - public static final Setting SETTING_TV_HIGHLIGHT_MULTI_VALUE = Setting.boolSetting("search.highlight.term_vector_multi_value", - true, Setting.Property.NodeScope); private static final String CACHE_KEY = "highlight-fsv"; private final Boolean termVectorMultiValue; @@ -74,11 +75,12 @@ public class FastVectorHighlighter implements Highlighter { FieldMapper mapper = highlighterContext.mapper; if (canHighlight(mapper) == false) { - throw new IllegalArgumentException("the field [" + highlighterContext.fieldName - + "] should be indexed with term vector with position offsets to be used with fast vector highlighter"); + throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + + "] should be indexed with term vector with position offsets to be used with fast vector highlighter"); } - Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; + Encoder encoder = field.fieldOptions().encoder().equals("html") ? + HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; if (!hitContext.cache().containsKey(CACHE_KEY)) { hitContext.cache().put(CACHE_KEY, new HighlighterEntry()); @@ -90,21 +92,21 @@ public class FastVectorHighlighter implements Highlighter { if (field.fieldOptions().requireFieldMatch()) { if (cache.fieldMatchFieldQuery == null) { /* - * we use top level reader to rewrite the query against all readers, with use caching it across hits (and across - * readers...) + * we use top level reader to rewrite the query against all readers, + * with use caching it across hits (and across readers...) */ - cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), - true, field.fieldOptions().requireFieldMatch()); + cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, + hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); } fieldQuery = cache.fieldMatchFieldQuery; } else { if (cache.noFieldMatchFieldQuery == null) { /* - * we use top level reader to rewrite the query against all readers, with use caching it across hits (and across - * readers...) + * we use top level reader to rewrite the query against all readers, + * with use caching it across hits (and across readers...) */ - cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), - true, field.fieldOptions().requireFieldMatch()); + cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, + hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); } fieldQuery = cache.noFieldMatchFieldQuery; } @@ -128,7 +130,7 @@ public class FastVectorHighlighter implements Highlighter { } } else { fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ? 
- new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset()); + new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset()); if (field.fieldOptions().scoreOrdered()) { if (!forceSource && mapper.fieldType().stored()) { fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(), @@ -142,7 +144,8 @@ public class FastVectorHighlighter implements Highlighter { fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } else { - fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), + fragmentsBuilder = + new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } } @@ -153,8 +156,8 @@ public class FastVectorHighlighter implements Highlighter { entry.fragmentsBuilder = fragmentsBuilder; if (cache.fvh == null) { // parameters to FVH are not requires since: - // first two booleans are not relevant since they are set on the CustomFieldQuery (phrase and fieldMatch) - // fragment builders are used explicitly + // first two booleans are not relevant since they are set on the CustomFieldQuery + // (phrase and fieldMatch) fragment builders are used explicitly cache.fvh = new org.apache.lucene.search.vectorhighlight.FastVectorHighlighter(); } CustomFieldQuery.highlightFilters.set(field.fieldOptions().highlightFilter()); @@ -172,13 +175,14 @@ public class FastVectorHighlighter implements Highlighter { // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible // Only send matched fields if they were requested to save time. 
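For reference, the Lucene calls being re-wrapped in this hunk have the same shape outside Elasticsearch. A sketch of plain FastVectorHighlighter usage, assuming the vectorhighlight API of this Lucene era and a field indexed with term vectors with positions and offsets, as the exception message above requires:

```java
import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
import org.apache.lucene.search.vectorhighlight.FieldQuery;

class FvhSketch {
    // Returns the best-scoring fragments of `field` for one document;
    // the reader, query, and docId are assumed to exist already.
    static String[] bestFragments(Query query, IndexReader reader, int docId,
                                  String field) throws IOException {
        FastVectorHighlighter fvh = new FastVectorHighlighter();
        FieldQuery fieldQuery = fvh.getFieldQuery(query);
        return fvh.getBestFragments(fieldQuery, reader, docId, field,
                100 /* fragCharSize */, 3 /* maxNumFragments */);
    }
}
```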
if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), - field.fieldOptions().matchedFields(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, - entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), + mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize, + numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), encoder); } else { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), - fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), - field.fieldOptions().postTags(), encoder); + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), + mapper.fieldType().name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, + entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } if (fragments != null && fragments.length > 0) { @@ -187,11 +191,13 @@ public class FastVectorHighlighter implements Highlighter { int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize(); if (noMatchSize > 0) { - // Essentially we just request that a fragment is built from 0 to noMatchSize using the normal fragmentsBuilder + // Essentially we just request that a fragment is built from 0 to noMatchSize using + // the normal fragmentsBuilder FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/); fieldFragList.add(0, noMatchSize, Collections.emptyList()); - fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), - fieldFragList, 1, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); + fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), + mapper.fieldType().name(), fieldFragList, 1, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), encoder); if (fragments != null && fragments.length > 0) { return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); } @@ -200,7 +206,8 @@ public class FastVectorHighlighter implements Highlighter { return null; } catch (Exception e) { - throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + throw new FetchPhaseExecutionException(context, + "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } } @@ -212,24 +219,31 @@ public class FastVectorHighlighter implements Highlighter { private static BoundaryScanner getBoundaryScanner(Field field) { final FieldOptions fieldOptions = field.fieldOptions(); - final Locale boundaryScannerLocale = fieldOptions.boundaryScannerLocale(); - switch(fieldOptions.boundaryScannerType()) { - case SENTENCE: - if (boundaryScannerLocale != null) { - return new BreakIteratorBoundaryScanner(BreakIterator.getSentenceInstance(boundaryScannerLocale)); - } - return DEFAULT_SENTENCE_BOUNDARY_SCANNER; - case WORD: - if (boundaryScannerLocale != null) { - return new BreakIteratorBoundaryScanner(BreakIterator.getWordInstance(boundaryScannerLocale)); - } - 
return DEFAULT_WORD_BOUNDARY_SCANNER; - default: - if (fieldOptions.boundaryMaxScan() != SimpleBoundaryScanner.DEFAULT_MAX_SCAN + final Locale boundaryScannerLocale = + fieldOptions.boundaryScannerLocale() != null ? fieldOptions.boundaryScannerLocale() : + Locale.ROOT; + final HighlightBuilder.BoundaryScannerType type = + fieldOptions.boundaryScannerType() != null ? fieldOptions.boundaryScannerType() : + HighlightBuilder.BoundaryScannerType.CHARS; + switch(type) { + case SENTENCE: + if (boundaryScannerLocale != null) { + return new BreakIteratorBoundaryScanner(BreakIterator.getSentenceInstance(boundaryScannerLocale)); + } + return DEFAULT_SENTENCE_BOUNDARY_SCANNER; + case WORD: + if (boundaryScannerLocale != null) { + return new BreakIteratorBoundaryScanner(BreakIterator.getWordInstance(boundaryScannerLocale)); + } + return DEFAULT_WORD_BOUNDARY_SCANNER; + case CHARS: + if (fieldOptions.boundaryMaxScan() != SimpleBoundaryScanner.DEFAULT_MAX_SCAN || fieldOptions.boundaryChars() != SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) { - return new SimpleBoundaryScanner(fieldOptions.boundaryMaxScan(), fieldOptions.boundaryChars()); - } - return DEFAULT_SIMPLE_BOUNDARY_SCANNER; + return new SimpleBoundaryScanner(fieldOptions.boundaryMaxScan(), fieldOptions.boundaryChars()); + } + return DEFAULT_SIMPLE_BOUNDARY_SCANNER; + default: + throw new IllegalArgumentException("Invalid boundary scanner type: " + type.toString()); } } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java index 45b8c612a76..c7c9c547b51 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java @@ -95,7 +95,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder 0); + field.fieldOptions().boundaryScannerLocale(), breakIterator, fieldValue, + field.fieldOptions().noMatchSize()); numberOfFragments = fieldValues.size(); // we are highlighting the whole content, one snippet per value } else { //using paragraph separator we make sure that each field value holds a discrete passage for highlighting - String fieldValue = mergeFieldValues(fieldValues, HighlightUtils.PARAGRAPH_SEPARATOR); + String fieldValue = mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR); + BreakIterator bi = getBreakIterator(field); highlighter = new CustomUnifiedHighlighter(searcher, analyzer, - mapperHighlighterEntry.passageFormatter, null, fieldValue, field.fieldOptions().noMatchSize() > 0); + mapperHighlighterEntry.passageFormatter, field.fieldOptions().boundaryScannerLocale(), bi, + fieldValue, field.fieldOptions().noMatchSize()); numberOfFragments = field.fieldOptions().numberOfFragments(); } if (field.fieldOptions().requireFieldMatch()) { @@ -144,11 +151,34 @@ public class UnifiedHighlighter implements Highlighter { return null; } - static class HighlighterEntry { + private BreakIterator getBreakIterator(SearchContextHighlight.Field field) { + final SearchContextHighlight.FieldOptions fieldOptions = field.fieldOptions(); + final Locale locale = + fieldOptions.boundaryScannerLocale() != null ? fieldOptions.boundaryScannerLocale() : + Locale.ROOT; + final HighlightBuilder.BoundaryScannerType type = + fieldOptions.boundaryScannerType() != null ? 
fieldOptions.boundaryScannerType() : + HighlightBuilder.BoundaryScannerType.SENTENCE; + int maxLen = fieldOptions.fragmentCharSize(); + switch (type) { + case SENTENCE: + if (maxLen > 0) { + return BoundedBreakIteratorScanner.getSentence(locale, maxLen); + } + return BreakIterator.getSentenceInstance(locale); + case WORD: + // ignore maxLen + return BreakIterator.getWordInstance(locale); + default: + throw new IllegalArgumentException("Invalid boundary scanner type: " + type.toString()); + } + } + + private static class HighlighterEntry { Map mappers = new HashMap<>(); } - static class MapperHighlighterEntry { + private static class MapperHighlighterEntry { final CustomPassageFormatter passageFormatter; private MapperHighlighterEntry(CustomPassageFormatter passageFormatter) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index a4aeec8cb58..b12b90de107 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.search.suggest.document.CompletionQuery; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.query.QueryShardContext; @@ -77,15 +78,7 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest CompletionFieldMapper.CompletionFieldType fieldType = getFieldType(); final CompletionQuery query; if (getPrefix() != null) { - if (fuzzyOptions != null) { - query = fieldType.fuzzyQuery(getPrefix().utf8ToString(), - Fuzziness.fromEdits(fuzzyOptions.getEditDistance()), - fuzzyOptions.getFuzzyPrefixLength(), fuzzyOptions.getFuzzyMinLength(), - fuzzyOptions.getMaxDeterminizedStates(), fuzzyOptions.isTranspositions(), - fuzzyOptions.isUnicodeAware()); - } else { - query = fieldType.prefixQuery(getPrefix()); - } + query = createCompletionQuery(getPrefix(), fieldType); } else if (getRegex() != null) { if (fuzzyOptions != null) { throw new IllegalArgumentException("can not use 'fuzzy' options with 'regex"); @@ -95,8 +88,10 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest } query = fieldType.regexpQuery(getRegex(), regexOptions.getFlagsValue(), regexOptions.getMaxDeterminizedStates()); + } else if (getText() != null) { + query = createCompletionQuery(getText(), fieldType); } else { - throw new IllegalArgumentException("'prefix' or 'regex' must be defined"); + throw new IllegalArgumentException("'prefix/text' or 'regex' must be defined"); } if (fieldType.hasContextMappings()) { ContextMappings contextMappings = fieldType.getContextMappings(); @@ -105,4 +100,18 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest return query; } + private CompletionQuery createCompletionQuery(BytesRef prefix, CompletionFieldMapper.CompletionFieldType fieldType) { + final CompletionQuery query; + if (fuzzyOptions != null) { + query = fieldType.fuzzyQuery(prefix.utf8ToString(), + Fuzziness.fromEdits(fuzzyOptions.getEditDistance()), + fuzzyOptions.getFuzzyPrefixLength(), fuzzyOptions.getFuzzyMinLength(), + fuzzyOptions.getMaxDeterminizedStates(), fuzzyOptions.isTranspositions(), + 
fuzzyOptions.isUnicodeAware()); + } else { + query = fieldType.prefixQuery(prefix); + } + return query; + } + } diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index d6f8cf8b586..2a615649fcf 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -1150,11 +1150,24 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus @Override public void onSnapshotFailure(Snapshot failedSnapshot, Exception e) { if (failedSnapshot.equals(snapshot)) { - logger.trace("deleted snapshot failed - deleting files", e); + logger.warn("deleted snapshot failed - deleting files", e); removeListener(this); - threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> - deleteSnapshot(failedSnapshot.getRepository(), failedSnapshot.getSnapshotId().getName(), listener, true) - ); + threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> { + try { + deleteSnapshot(failedSnapshot.getRepository(), + failedSnapshot.getSnapshotId().getName(), + listener, + true); + } catch (SnapshotMissingException smex) { + logger.info((Supplier) () -> new ParameterizedMessage( + "Tried deleting in-progress snapshot [{}], but it " + + "could not be found after failing to abort.", + smex.getSnapshotName()), e); + listener.onFailure(new SnapshotException(snapshot, + "Tried deleting in-progress snapshot [{}], but it " + + "could not be found after failing to abort.", smex)); + } + }); } } }); diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskAwareRequest.java b/core/src/main/java/org/elasticsearch/tasks/TaskAwareRequest.java new file mode 100644 index 00000000000..a2364ac8e40 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/tasks/TaskAwareRequest.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.tasks; + +/** + * An interface for a request that can be used to register a task manager task + */ +public interface TaskAwareRequest { + /** + * Set a reference to task that caused this task to be run. + */ + default void setParentTask(String parentTaskNode, long parentTaskId) { + setParentTask(new TaskId(parentTaskNode, parentTaskId)); + } + + /** + * Set a reference to task that created this request. + */ + void setParentTask(TaskId taskId); + + /** + * Get a reference to the task that created this request. Implementers should default to + * {@link TaskId#EMPTY_TASK_ID}, meaning "there is no parent". + */ + TaskId getParentTask(); + + /** + * Returns the task object that should be used to keep track of the processing of the request. 
+ * + * A request can override this method and return null to avoid being tracked by the task + * manager. + */ + default Task createTask(long id, String type, String action, TaskId parentTaskId) { + return new Task(id, type, action, getDescription(), parentTaskId); + } + + /** + * Returns optional description of the request to be displayed by the task manager + */ + default String getDescription() { + return ""; + } +} diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java index bf62b5bb013..afeeeeedd11 100644 --- a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -35,18 +35,14 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; -import org.elasticsearch.transport.TransportRequest; import java.io.IOException; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; @@ -83,7 +79,7 @@ public class TaskManager extends AbstractComponent implements ClusterStateApplie *

* Returns the task manager tracked task or null if the task doesn't support the task manager */ - public Task register(String type, String action, TransportRequest request) { + public Task register(String type, String action, TaskAwareRequest request) { Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action, request.getParentTask()); if (task == null) { return null; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java index 54f3a228a81..c42ec24ad15 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java @@ -21,12 +21,12 @@ package org.elasticsearch.transport; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskAwareRequest; import org.elasticsearch.tasks.TaskId; import java.io.IOException; -public abstract class TransportRequest extends TransportMessage { +public abstract class TransportRequest extends TransportMessage implements TaskAwareRequest { public static class Empty extends TransportRequest { public static final Empty INSTANCE = new Empty(); } @@ -39,16 +39,10 @@ public abstract class TransportRequest extends TransportMessage { public TransportRequest() { } - /** - * Set a reference to task that caused this task to be run. - */ - public void setParentTask(String parentTaskNode, long parentTaskId) { - setParentTask(new TaskId(parentTaskNode, parentTaskId)); - } - /** * Set a reference to task that created this request. */ + @Override public void setParentTask(TaskId taskId) { this.parentTaskId = taskId; } @@ -56,26 +50,11 @@ public abstract class TransportRequest extends TransportMessage { /** * Get a reference to the task that created this request. Defaults to {@link TaskId#EMPTY_TASK_ID}, meaning "there is no parent". */ + @Override public TaskId getParentTask() { return parentTaskId; } - /** - * Returns the task object that should be used to keep track of the processing of the request. - * - * A request can override this method and return null to avoid being tracked by the task manager. - */ - public Task createTask(long id, String type, String action, TaskId parentTaskId) { - return new Task(id, type, action, getDescription(), parentTaskId); - } - - /** - * Returns optional description of the request to be displayed by the task manager - */ - public String getDescription() { - return ""; - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); diff --git a/core/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java b/core/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java new file mode 100644 index 00000000000..0cf62e8ce6c --- /dev/null +++ b/core/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.lucene.search.uhighlight; + +import org.elasticsearch.test.ESTestCase; + +import java.text.BreakIterator; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; + +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class BoundedBreakIteratorScannerTests extends ESTestCase { + private static final String[] WORD_BOUNDARIES = + new String[] { " ", " ", "\t", "#", "\n" }; + private static final String[] SENTENCE_BOUNDARIES = + new String[] { "! ", "? ", ". ", ".\n", ".\n\n" }; + + private void testRandomAsciiTextCase(BreakIterator bi, int maxLen) { + // Generate a random set of unique terms with ascii character + int maxSize = randomIntBetween(5, 100); + String[] vocabulary = new String[maxSize]; + for (int i = 0; i < maxSize; i++) { + if (rarely()) { + vocabulary[i] = randomAsciiOfLengthBetween(50, 200); + } else { + vocabulary[i] = randomAsciiOfLengthBetween(1, 30); + } + } + + // Generate a random text made of random terms separated with word-boundaries + // and sentence-boundaries. + StringBuilder text = new StringBuilder(); + List offsetList = new ArrayList<> (); + List sizeList = new ArrayList<> (); + // the number of sentences to generate + int numSentences = randomIntBetween(10, 100); + int maxTermLen = 0; + for (int i = 0; i < numSentences; i++) { + // the number of terms in the sentence + int numTerms = randomIntBetween(5, 10); + for (int j = 0; j < numTerms; j++) { + int termId = randomIntBetween(0, vocabulary.length - 1); + String term = vocabulary[termId].toLowerCase(Locale.ROOT); + if (j == 0) { + // capitalize the first letter of the first term in the sentence + term = term.substring(0, 1).toUpperCase(Locale.ROOT) + term.substring(1); + } else { + String sep = randomFrom(WORD_BOUNDARIES); + text.append(sep); + } + maxTermLen = Math.max(term.length(), maxTermLen); + offsetList.add(text.length()); + sizeList.add(term.length()); + text.append(term); + } + String boundary = randomFrom(SENTENCE_BOUNDARIES); + text.append(boundary); + } + + int[] sizes = sizeList.stream().mapToInt(i->i).toArray(); + int[] offsets = offsetList.stream().mapToInt(i->i).toArray(); + + bi.setText(text.toString()); + int currentPos = randomIntBetween(0, 20); + int lastEnd = -1; + int maxPassageLen = maxLen+(maxTermLen*2); + while (currentPos < offsets.length) { + // find the passage that contains the current term + int nextOffset = offsets[currentPos]; + int start = bi.preceding(nextOffset+1); + int end = bi.following(nextOffset); + + // check that the passage is valid + assertThat(start, greaterThanOrEqualTo(lastEnd)); + assertThat(end, greaterThan(start)); + assertThat(start, lessThanOrEqualTo(nextOffset)); + assertThat(end, greaterThanOrEqualTo(nextOffset)); + int passageLen = end-start; + assertThat(passageLen, lessThanOrEqualTo(maxPassageLen)); + + // checks that the start and end of the passage are on word boundaries. 
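The assertions in this test lean on the java.text.BreakIterator contract: preceding(n) returns the last boundary strictly before n, and following(n) the first boundary strictly after n, which is why the test calls preceding(offset + 1) to admit a boundary sitting exactly at the offset. A tiny JDK-only illustration with hypothetical text:

```java
import java.text.BreakIterator;
import java.util.Locale;

public class BreakIteratorDemo {
    public static void main(String[] args) {
        String text = "First sentence. Second sentence here. Third one.";
        BreakIterator bi = BreakIterator.getSentenceInstance(Locale.ROOT);
        bi.setText(text);
        int offset = text.indexOf("Second");
        int start = bi.preceding(offset + 1); // boundary at or before the offset
        int end = bi.following(offset);       // boundary after the offset
        System.out.println(text.substring(start, end)); // "Second sentence here. "
    }
}
```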
+ int startPos = Arrays.binarySearch(offsets, start); + int endPos = Arrays.binarySearch(offsets, end); + if (startPos < 0) { + int lastWordEnd = + offsets[Math.abs(startPos)-2] + sizes[Math.abs(startPos)-2]; + assertThat(start, greaterThanOrEqualTo(lastWordEnd)); + } + if (endPos < 0) { + if (Math.abs(endPos)-2 < offsets.length) { + int lastWordEnd = + offsets[Math.abs(endPos) - 2] + sizes[Math.abs(endPos) - 2]; + assertThat(end, greaterThanOrEqualTo(lastWordEnd)); + } + // advance the position to the end of the current passage + currentPos = (Math.abs(endPos) - 1); + } else { + // advance the position to the end of the current passage + currentPos = endPos; + } + // randomly advance to the next term to highlight + currentPos += randomIntBetween(0, 20); + lastEnd = end; + } + } + + public void testBoundedSentence() { + for (int i = 0; i < 20; i++) { + int maxLen = randomIntBetween(10, 500); + testRandomAsciiTextCase( + BoundedBreakIteratorScanner.getSentence(Locale.ROOT, maxLen), + maxLen + ); + } + } +} diff --git a/core/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/core/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index 83b42750f92..23e867d2573 100644 --- a/core/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/core/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -20,20 +20,22 @@ package org.apache.lucene.search.uhighlight; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.TextField; +import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.queries.CommonTermsQuery; import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TermQuery; @@ -41,219 +43,167 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.highlight.DefaultEncoder; import org.apache.lucene.search.highlight.Snippet; import org.apache.lucene.store.Directory; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.all.AllTermQuery; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; -import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; +import java.text.BreakIterator; +import java.util.Locale; +import static org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter.MULTIVAL_SEP_CHAR; import static org.hamcrest.CoreMatchers.equalTo; public class CustomUnifiedHighlighterTests extends ESTestCase { - public void testCustomUnifiedHighlighter() throws Exception { + private void assertHighlightOneDoc(String fieldName, String[] inputs, Analyzer analyzer, Query query, + Locale locale, BreakIterator breakIterator, + int noMatchSize, 
                                       String[] expectedPassages) throws Exception {
         Directory dir = newDirectory();
-        IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
-        iwc.setMergePolicy(newLogMergePolicy());
+        IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
+        iwc.setMergePolicy(newTieredMergePolicy(random()));
         RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
-
-        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
-        offsetsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
-        offsetsType.setStoreTermVectorOffsets(true);
-        offsetsType.setStoreTermVectorPositions(true);
-        offsetsType.setStoreTermVectors(true);
-
-        //good position but only one match
-        final String firstValue = "This is a test. Just a test1 highlighting from unified highlighter.";
-        Field body = new Field("body", "", offsetsType);
+        FieldType ft = new FieldType(TextField.TYPE_STORED);
+        ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+        ft.freeze();
         Document doc = new Document();
-        doc.add(body);
-        body.setStringValue(firstValue);
-
-        //two matches, not the best snippet due to its length though
-        final String secondValue = "This is the second highlighting value to perform highlighting on a longer text " +
-                "that gets scored lower.";
-        Field body2 = new Field("body", "", offsetsType);
-        doc.add(body2);
-        body2.setStringValue(secondValue);
-
-        //two matches and short, will be scored highest
-        final String thirdValue = "This is highlighting the third short highlighting value.";
-        Field body3 = new Field("body", "", offsetsType);
-        doc.add(body3);
-        body3.setStringValue(thirdValue);
-
-        //one match, same as first but at the end, will be scored lower due to its position
-        final String fourthValue = "Just a test4 highlighting from unified highlighter.";
-        Field body4 = new Field("body", "", offsetsType);
-        doc.add(body4);
-        body4.setStringValue(fourthValue);
-
+        for (String input : inputs) {
+            Field field = new Field(fieldName, "", ft);
+            field.setStringValue(input);
+            doc.add(field);
+        }
         iw.addDocument(doc);
-
-        IndexReader ir = iw.getReader();
+        DirectoryReader reader = iw.getReader();
+        IndexSearcher searcher = newSearcher(reader);
         iw.close();
-
-        String firstHlValue = "Just a test1 <b>highlighting</b> from unified highlighter.";
-        String secondHlValue = "This is the second <b>highlighting</b> value to perform <b>highlighting</b> on a" +
-                " longer text that gets scored lower.";
-        String thirdHlValue = "This is <b>highlighting</b> the third short <b>highlighting</b> value.";
-        String fourthHlValue = "Just a test4 <b>highlighting</b> from unified highlighter.";
-
-        IndexSearcher searcher = newSearcher(ir);
-        Query query = new TermQuery(new Term("body", "highlighting"));
-
-        TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
+        TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER);
         assertThat(topDocs.totalHits, equalTo(1));
-
-        int docId = topDocs.scoreDocs[0].doc;
-
-        String fieldValue = firstValue + HighlightUtils.PARAGRAPH_SEPARATOR + secondValue +
-                HighlightUtils.PARAGRAPH_SEPARATOR + thirdValue + HighlightUtils.PARAGRAPH_SEPARATOR + fourthValue;
-
-        CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, iwc.getAnalyzer(),
-            new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), null, fieldValue, true);
-        Snippet[] snippets = highlighter.highlightField("body", query, docId, 5);
-
-        assertThat(snippets.length, equalTo(4));
-
-        assertThat(snippets[0].getText(), equalTo(firstHlValue));
-        assertThat(snippets[1].getText(), equalTo(secondHlValue));
-        assertThat(snippets[2].getText(), equalTo(thirdHlValue));
-        assertThat(snippets[3].getText(), equalTo(fourthHlValue));
-        ir.close();
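+        // the inputs are joined with the unified highlighter's multi-value separator so
+        // that passage offsets computed over the concatenated string line up with the
+        // indexed field values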
+        String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR));
+        CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer,
+            new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), locale, breakIterator, rawValue,
+            noMatchSize);
+        highlighter.setFieldMatcher((name) -> "text".equals(name));
+        final Snippet[] snippets =
+            highlighter.highlightField("text", query, topDocs.scoreDocs[0].doc, expectedPassages.length);
+        assertEquals(snippets.length, expectedPassages.length);
+        for (int i = 0; i < snippets.length; i++) {
+            assertEquals(snippets[i].getText(), expectedPassages[i]);
+        }
+        reader.close();
         dir.close();
     }
 
+    public void testSimple() throws Exception {
+        final String[] inputs = {
+            "This is a test. Just a test1 highlighting from unified highlighter.",
+            "This is the second highlighting value to perform highlighting on a longer text that gets scored lower.",
+            "This is highlighting the third short highlighting value.",
+            "Just a test4 highlighting from unified highlighter."
+        };
+
+        String[] expectedPassages = {
+            "Just a test1 <b>highlighting</b> from unified highlighter.",
+            "This is the second <b>highlighting</b> value to perform <b>highlighting</b> on a" +
+                " longer text that gets scored lower.",
+            "This is <b>highlighting</b> the third short <b>highlighting</b> value.",
+            "Just a test4 <b>highlighting</b> from unified highlighter."
+        };
+        Query query = new TermQuery(new Term("text", "highlighting"));
+        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
+            BreakIterator.getSentenceInstance(Locale.ROOT), 0, expectedPassages);
+    }
+
     public void testNoMatchSize() throws Exception {
-        Directory dir = newDirectory();
-        Analyzer analyzer = new StandardAnalyzer();
-        IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
-        iwc.setMergePolicy(newLogMergePolicy());
-        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
-
-        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
-        offsetsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
-        offsetsType.setStoreTermVectorOffsets(true);
-        offsetsType.setStoreTermVectorPositions(true);
-        offsetsType.setStoreTermVectors(true);
-        Field body = new Field("body", "", offsetsType);
-        Field none = new Field("none", "", offsetsType);
-        Document doc = new Document();
-        doc.add(body);
-        doc.add(none);
-
-        String firstValue = "This is a test. Just a test highlighting from unified. Feel free to ignore.";
-        body.setStringValue(firstValue);
-        none.setStringValue(firstValue);
-        iw.addDocument(doc);
-
-        IndexReader ir = iw.getReader();
-        iw.close();
-
-        Query query = new TermQuery(new Term("none", "highlighting"));
-
-        IndexSearcher searcher = newSearcher(ir);
-        TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
-        assertThat(topDocs.totalHits, equalTo(1));
-        int docId = topDocs.scoreDocs[0].doc;
-
-        CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
-        CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer, passageFormatter,
-            null, firstValue, false);
-        Snippet[] snippets = highlighter.highlightField("body", query, docId, 5);
-        assertThat(snippets.length, equalTo(0));
-
-        highlighter = new CustomUnifiedHighlighter(searcher, analyzer, passageFormatter, null, firstValue, true);
-        snippets = highlighter.highlightField("body", query, docId, 5);
-        assertThat(snippets.length, equalTo(1));
-        assertThat(snippets[0].getText(), equalTo("This is a test."));
-        ir.close();
-        dir.close();
-    }
-
-
-    private IndexReader indexOneDoc(Directory dir, String field, String value, Analyzer analyzer) throws IOException {
-        IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
-        iwc.setMergePolicy(newLogMergePolicy());
-        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
-
-        FieldType ft = new FieldType(TextField.TYPE_STORED);
-        ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
-        Field textField = new Field(field, "", ft);
-        Document doc = new Document();
-        doc.add(textField);
-
-        textField.setStringValue(value);
-        iw.addDocument(doc);
-        IndexReader ir = iw.getReader();
-        iw.close();
-        return ir;
+        final String[] inputs = {
+            "This is a test. Just a test highlighting from unified. Feel free to ignore."
+        };
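+        // the query deliberately targets the "body" field, which can never match the
+        // highlighted "text" field, so only the no-match path of the highlighter is exercised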
+        Query query = new TermQuery(new Term("body", "highlighting"));
+        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
+            BreakIterator.getSentenceInstance(Locale.ROOT), 100, inputs);
     }
 
     public void testMultiPhrasePrefixQuery() throws Exception {
-        Analyzer analyzer = new StandardAnalyzer();
-        Directory dir = newDirectory();
-        String value = "The quick brown fox.";
-        IndexReader ir = indexOneDoc(dir, "text", value, analyzer);
+        final String[] inputs = {
+            "The quick brown fox."
+        };
+        final String[] outputs = {
+            "The <b>quick</b> <b>brown</b> <b>fox</b>."
+        };
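+        // "fo" is a prefix term here; the multi-phrase prefix expansion should still
+        // match and highlight "fox"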
         MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery();
         query.add(new Term("text", "quick"));
         query.add(new Term("text", "brown"));
         query.add(new Term("text", "fo"));
-        IndexSearcher searcher = newSearcher(ir);
-        TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
-        assertThat(topDocs.totalHits, equalTo(1));
-        int docId = topDocs.scoreDocs[0].doc;
-        CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
-        CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer,
-            passageFormatter, null, value, false);
-        Snippet[] snippets = highlighter.highlightField("text", query, docId, 5);
-        assertThat(snippets.length, equalTo(1));
-        assertThat(snippets[0].getText(), equalTo("The <b>quick</b> <b>brown</b> <b>fox</b>."));
-        ir.close();
-        dir.close();
+        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
+            BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs);
     }
 
-    public void testAllTermQuery() throws IOException {
-        Directory dir = newDirectory();
-        String value = "The quick brown fox.";
-        Analyzer analyzer = new StandardAnalyzer();
-        IndexReader ir = indexOneDoc(dir, "all", value, analyzer);
-        AllTermQuery query = new AllTermQuery(new Term("all", "fox"));
-        IndexSearcher searcher = newSearcher(ir);
-        TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
-        assertThat(topDocs.totalHits, equalTo(1));
-        int docId = topDocs.scoreDocs[0].doc;
-        CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
-        CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer,
-            passageFormatter, null, value, false);
-        Snippet[] snippets = highlighter.highlightField("all", query, docId, 5);
-        assertThat(snippets.length, equalTo(1));
-        assertThat(snippets[0].getText(), equalTo("The quick brown <b>fox</b>."));
-        ir.close();
-        dir.close();
+    public void testAllTermQuery() throws Exception {
+        final String[] inputs = {
+            "The quick brown fox."
+        };
+        final String[] outputs = {
+            "The quick brown <b>fox</b>."
+        };
+        AllTermQuery query = new AllTermQuery(new Term("text", "fox"));
+        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
+            BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs);
     }
 
-    public void testCommonTermsQuery() throws IOException {
-        Directory dir = newDirectory();
-        String value = "The quick brown fox.";
-        Analyzer analyzer = new StandardAnalyzer();
-        IndexReader ir = indexOneDoc(dir, "text", value, analyzer);
+    public void testCommonTermsQuery() throws Exception {
+        final String[] inputs = {
+            "The quick brown fox."
+        };
+        final String[] outputs = {
+            "The <b>quick</b> <b>brown</b> <b>fox</b>."
+        };
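+        // a max term frequency of 128 keeps every term of this single-document index in
+        // the low-frequency group, so all three terms should be highlighted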
         CommonTermsQuery query = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 128);
         query.add(new Term("text", "quick"));
         query.add(new Term("text", "brown"));
         query.add(new Term("text", "fox"));
-        IndexSearcher searcher = newSearcher(ir);
-        TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
-        assertThat(topDocs.totalHits, equalTo(1));
-        int docId = topDocs.scoreDocs[0].doc;
-        CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
-        CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer,
-            passageFormatter, null, value, false);
-        Snippet[] snippets = highlighter.highlightField("text", query, docId, 5);
-        assertThat(snippets.length, equalTo(1));
-        assertThat(snippets[0].getText(), equalTo("The <b>quick</b> <b>brown</b> <b>fox</b>."));
-        ir.close();
-        dir.close();
+        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
+            BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs);
+    }
+
+    public void testSentenceBoundedBreakIterator() throws Exception {
+        final String[] inputs = {
+            "The quick brown fox in a long sentence with another quick brown fox. " +
+                "Another sentence with brown fox."
+        };
+        final String[] outputs = {
+            "The <b>quick</b> <b>brown</b>",
+            "<b>fox</b> in a long",
+            "with another <b>quick</b>",
+            "<b>brown</b> <b>fox</b>.",
+            "sentence with <b>brown</b>",
+            "<b>fox</b>.",
+        };
+        BooleanQuery query = new BooleanQuery.Builder()
+            .add(new TermQuery(new Term("text", "quick")), BooleanClause.Occur.SHOULD)
+            .add(new TermQuery(new Term("text", "brown")), BooleanClause.Occur.SHOULD)
+            .add(new TermQuery(new Term("text", "fox")), BooleanClause.Occur.SHOULD)
+            .build();
+        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
+            BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 10), 0, outputs);
+    }
+
+    public void testRepeat() throws Exception {
+        final String[] inputs = {
+            "Fun fun fun fun fun fun fun fun fun fun"
+        };
+        final String[] outputs = {
+            "<b>Fun</b> <b>fun</b> <b>fun</b>",
+            "<b>fun</b> <b>fun</b>",
+            "<b>fun</b> <b>fun</b> <b>fun</b>",
+            "<b>fun</b> <b>fun</b>"
+        };
+        Query query = new TermQuery(new Term("text", "fun"));
+        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
+            BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 10), 0, outputs);
+
+        query = new PhraseQuery.Builder()
+            .add(new Term("text", "fun"))
+            .add(new Term("text", "fun"))
+            .build();
+        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
+            BoundedBreakIteratorScanner.getSentence(Locale.ROOT, 10), 0, outputs);
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java b/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java
new file mode 100644
index 00000000000..cd8b0743675
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.search.SearchPhaseResult;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+
+public class AbstractSearchAsyncActionTookTests extends ESTestCase {
+
+    private AbstractSearchAsyncAction<SearchPhaseResult> createAction(
+        final boolean controlled,
+        final AtomicLong expected) {
+
+        final Runnable runnable;
+        final TransportSearchAction.SearchTimeProvider timeProvider;
+        if (controlled) {
+            runnable = () -> expected.set(randomNonNegativeLong());
+            timeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, expected::get);
+        } else {
+            runnable = () -> {
+                long elapsed = spinForAtLeastNMilliseconds(randomIntBetween(1, 10));
+                expected.set(elapsed);
+            };
+            timeProvider = new TransportSearchAction.SearchTimeProvider(
+                0,
+                System.nanoTime(),
+                System::nanoTime);
+        }
+
+        final ShardIterator it = new ShardIterator() {
+            @Override
+            public ShardId shardId() {
+                return null;
+            }
+
+            @Override
+            public void reset() {
+
+            }
+
+            @Override
+            public int compareTo(ShardIterator o) {
+                return 0;
+            }
+
+            @Override
+            public int size() {
+                return 0;
+            }
+
+            @Override
+            public int sizeActive() {
+                return 0;
+            }
+
+            @Override
+            public ShardRouting nextOrNull() {
+                return null;
+            }
+
+            @Override
+            public int remaining() {
+                return 0;
+            }
+
+            @Override
+            public Iterable<ShardRouting> asUnordered() {
+                return null;
+            }
+        };
+
+        return new AbstractSearchAsyncAction<SearchPhaseResult>(
+            "test",
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            new GroupShardsIterator(Collections.singletonList(it)),
+            timeProvider,
+            0,
+            null,
+            null
+        ) {
+            @Override
+            protected SearchPhase getNextPhase(
+                final SearchPhaseResults<SearchPhaseResult> results,
+                final SearchPhaseContext context) {
+                return null;
+            }
+
+            @Override
+            protected void executePhaseOnShard(
+                final ShardIterator shardIt,
+                final ShardRouting shard,
+                final ActionListener<SearchPhaseResult> listener) {
+
+            }
+
+            @Override
+            long buildTookInMillis() {
+                runnable.run();
+                return super.buildTookInMillis();
+            }
+        };
+    }
+
+    public void testTookWithControlledClock() {
+        runTestTook(true);
+    }
+
+    public void testTookWithRealClock() {
+        runTestTook(false);
+    }
+
+    private void runTestTook(final boolean controlled) {
+        final AtomicLong expected = new AtomicLong();
+        AbstractSearchAsyncAction<SearchPhaseResult> action = createAction(controlled, expected);
+        final long actual = action.buildTookInMillis();
+        if (controlled) {
+            // with a controlled clock, we can assert the exact took time
+            assertThat(actual, equalTo(TimeUnit.NANOSECONDS.toMillis(expected.get())));
+        } else {
+            // with a real clock, the best we can say is that it took as long as we spun for
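+            // (expected holds elapsed nanoseconds while buildTookInMillis returns
+            // milliseconds, hence the TimeUnit.NANOSECONDS.toMillis conversion)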
+            assertThat(actual, greaterThanOrEqualTo(TimeUnit.NANOSECONDS.toMillis(expected.get())));
+        }
+    }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
index 9b7fad265bf..53e4eb59ae5 100644
--- a/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
@@ -92,9 +92,22 @@ public class SearchAsyncActionTests extends ESTestCase {
         lookup.put(primaryNode.getId(), new MockConnection(primaryNode));
         lookup.put(replicaNode.getId(), new MockConnection(replicaNode));
         Map<String, AliasFilter> aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY));
-        AbstractSearchAsyncAction<TestSearchPhaseResult> asyncAction = new AbstractSearchAsyncAction<TestSearchPhaseResult>("test", logger, transportService,
-            lookup::get, aliasFilters, Collections.emptyMap(), null, request, responseListener, shardsIter, 0, 0, null,
-            new InitialSearchPhase.SearchPhaseResults<>(shardsIter.size())) {
+        AbstractSearchAsyncAction<TestSearchPhaseResult> asyncAction =
+            new AbstractSearchAsyncAction<TestSearchPhaseResult>(
+                "test",
+                logger,
+                transportService,
+                lookup::get,
+                aliasFilters,
+                Collections.emptyMap(),
+                null,
+                request,
+                responseListener,
+                shardsIter,
+                new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0),
+                0,
+                null,
+                new InitialSearchPhase.SearchPhaseResults<>(shardsIter.size())) {
             TestSearchResponse response = new TestSearchResponse();
 
             @Override
diff --git a/core/src/test/java/org/elasticsearch/action/support/AdapterActionFutureTests.java b/core/src/test/java/org/elasticsearch/action/support/AdapterActionFutureTests.java
new file mode 100644
index 00000000000..a7405ddae8c
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/support/AdapterActionFutureTests.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.support;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Objects;
+import java.util.concurrent.BrokenBarrierException;
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+public class AdapterActionFutureTests extends ESTestCase {
+
+    public void testInterruption() throws Exception {
+        final AdapterActionFuture<String, Integer> adapter =
+            new AdapterActionFuture<String, Integer>() {
+                @Override
+                protected String convert(final Integer listenerResponse) {
+                    return Objects.toString(listenerResponse);
+                }
+            };
+
+        // test all possible methods that can be interrupted
+        final Runnable runnable = () -> {
+            final int method = randomIntBetween(0, 4);
+            switch (method) {
+                case 0:
+                    adapter.actionGet();
+                    break;
+                case 1:
+                    adapter.actionGet("30s");
+                    break;
+                case 2:
+                    adapter.actionGet(30000);
+                    break;
+                case 3:
+                    adapter.actionGet(TimeValue.timeValueSeconds(30));
+                    break;
+                case 4:
+                    adapter.actionGet(30, TimeUnit.SECONDS);
+                    break;
+                default:
+                    throw new AssertionError(method);
+            }
+        };
+
+        final CyclicBarrier barrier = new CyclicBarrier(2);
+        final Thread main = Thread.currentThread();
+        final Thread thread = new Thread(() -> {
+            try {
+                barrier.await();
+            } catch (final BrokenBarrierException | InterruptedException e) {
+                throw new RuntimeException(e);
+            }
+            main.interrupt();
+        });
+        thread.start();
+
+        final AtomicBoolean interrupted = new AtomicBoolean();
+
+        barrier.await();
+
+        try {
+            runnable.run();
+        } catch (final IllegalStateException e) {
+            interrupted.set(Thread.interrupted());
+        }
+        // we check this here instead of in the catch block to ensure that the catch block executed
+        assertTrue(interrupted.get());
+
+        thread.join();
+    }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java
similarity index 99%
rename from core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
rename to core/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java
index 881d03b3dc6..bd553cff6e1 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java
@@ -49,7 +49,7 @@ import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 import static org.mockito.Mockito.when;
 
-public class BootstrapCheckTests extends ESTestCase {
+public class BootstrapChecksTests extends ESTestCase {
 
     public void testNonProductionMode() throws NodeValidationException {
         // nothing should happen since we are in non-production mode
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/JarHellTests.java b/core/src/test/java/org/elasticsearch/bootstrap/JarHellTests.java
index d38d346d6c1..7003ef3d81e 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/JarHellTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/JarHellTests.java
@@ -30,7 +30,9 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 import java.util.jar.Attributes;
 import java.util.jar.JarOutputStream;
 import java.util.jar.Manifest;
@@ -62,7 +64,8 @@ public class JarHellTests extends ESTestCase {
 
     public void testDifferentJars() throws Exception {
         Path dir = createTempDir();
-        URL[] jars = {makeJar(dir, "foo.jar", null, "DuplicateClass.class"), makeJar(dir, "bar.jar", null, "DuplicateClass.class")};
+        Set<URL> jars = asSet(makeJar(dir, "foo.jar", null, "DuplicateClass.class"),
+            makeJar(dir, "bar.jar", null, "DuplicateClass.class"));
         try {
             JarHell.checkJarHell(jars);
             fail("did not get expected exception");
@@ -74,17 +77,11 @@ public class JarHellTests extends ESTestCase {
         }
     }
 
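+    // a Set<URL> classpath cannot contain duplicate entries, which is what makes the
+    // old duplicate-classpath leniency test below obsolete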
-    public void testDuplicateClasspathLeniency() throws Exception {
-        Path dir = createTempDir();
-        URL jar = makeJar(dir, "foo.jar", null, "Foo.class");
-        URL[] jars = {jar, jar};
-        JarHell.checkJarHell(jars);
-    }
-
     public void testDirsOnClasspath() throws Exception {
         Path dir1 = createTempDir();
         Path dir2 = createTempDir();
-        URL[] dirs = {makeFile(dir1, "DuplicateClass.class"), makeFile(dir2, "DuplicateClass.class")};
+        Set<URL> dirs = asSet(makeFile(dir1, "DuplicateClass.class"),
+            makeFile(dir2, "DuplicateClass.class"));
         try {
             JarHell.checkJarHell(dirs);
             fail("did not get expected exception");
@@ -99,7 +96,8 @@ public class JarHellTests extends ESTestCase {
     public void testDirAndJar() throws Exception {
         Path dir1 = createTempDir();
         Path dir2 = createTempDir();
-        URL[] dirs = {makeJar(dir1, "foo.jar", null, "DuplicateClass.class"), makeFile(dir2, "DuplicateClass.class")};
+        Set<URL> dirs = asSet(makeJar(dir1, "foo.jar", null, "DuplicateClass.class"),
+            makeFile(dir2, "DuplicateClass.class"));
         try {
             JarHell.checkJarHell(dirs);
             fail("did not get expected exception");
@@ -113,8 +111,8 @@ public class JarHellTests extends ESTestCase {
     public void testWithinSingleJar() throws Exception {
         // the java api for zip file does not allow creating duplicate entries (good!) so
-        // this bogus jar had to be constructed with ant
-        URL[] jars = {JarHellTests.class.getResource("duplicate-classes.jar")};
+        // this bogus jar had to be constructed with https://github.com/jasontedor/duplicate-classes
+        Set<URL> jars = Collections.singleton(JarHellTests.class.getResource("duplicate-classes.jar"));
         try {
             JarHell.checkJarHell(jars);
             fail("did not get expected exception");
@@ -127,7 +125,7 @@ public class JarHellTests extends ESTestCase {
     }
 
     public void testXmlBeansLeniency() throws Exception {
-        URL[] jars = {JarHellTests.class.getResource("duplicate-xmlbeans-classes.jar")};
+        Set<URL> jars = Collections.singleton(JarHellTests.class.getResource("duplicate-xmlbeans-classes.jar"));
         JarHell.checkJarHell(jars);
     }
 
@@ -145,7 +143,7 @@ public class JarHellTests extends ESTestCase {
         Attributes attributes = manifest.getMainAttributes();
         attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
         attributes.put(new Attributes.Name("X-Compile-Target-JDK"), targetVersion.toString());
-        URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
+        Set<URL> jars = Collections.singleton(makeJar(dir, "foo.jar", manifest, "Foo.class"));
         try {
             JarHell.checkJarHell(jars);
             fail("did not get expected exception");
@@ -161,7 +159,7 @@ public class JarHellTests extends ESTestCase {
         Attributes attributes = manifest.getMainAttributes();
         attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
         attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "bogus");
-        URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
+        Set<URL> jars = Collections.singleton(makeJar(dir, "foo.jar", manifest, "Foo.class"));
         try {
             JarHell.checkJarHell(jars);
             fail("did not get expected exception");
@@ -176,8 +174,7 @@ public class JarHellTests extends ESTestCase {
         Attributes attributes = manifest.getMainAttributes();
         attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
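         // a target JDK of 1.7 is older than any JVM able to run this test, so the
         // jar hell check is expected to pass without throwing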
         attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "1.7");
-        URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
-
+        Set<URL> jars = Collections.singleton(makeJar(dir, "foo.jar", manifest, "Foo.class"));
         JarHell.checkJarHell(jars);
     }
 
@@ -188,7 +185,7 @@ public class JarHellTests extends ESTestCase {
         Attributes attributes = manifest.getMainAttributes();
         attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
         attributes.put(new Attributes.Name("X-Compile-Elasticsearch-Version"), Version.CURRENT.toString());
-        URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
+        Set<URL> jars = Collections.singleton(makeJar(dir, "foo.jar", manifest, "Foo.class"));
         JarHell.checkJarHell(jars);
     }
 
@@ -199,7 +196,7 @@ public class JarHellTests extends ESTestCase {
         Attributes attributes = manifest.getMainAttributes();
         attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
         attributes.put(new Attributes.Name("X-Compile-Elasticsearch-Version"), "1.0-bogus");
-        URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
+        Set<URL> jars = Collections.singleton(makeJar(dir, "foo.jar", manifest, "Foo.class"));
         try {
             JarHell.checkJarHell(jars);
             fail("did not get expected exception");
@@ -242,8 +239,8 @@ public class JarHellTests extends ESTestCase {
         Path element1 = createTempDir();
         Path element2 = createTempDir();
 
-        URL expected[] = { element1.toUri().toURL(), element2.toUri().toURL() };
-        assertArrayEquals(expected, JarHell.parseClassPath(element1.toString() + ":" + element2.toString()));
+        Set<URL> expected = asSet(element1.toUri().toURL(), element2.toUri().toURL());
+        assertEquals(expected, JarHell.parseClassPath(element1.toString() + ":" + element2.toString()));
     }
 
     /**
@@ -271,8 +268,8 @@ public class JarHellTests extends ESTestCase {
         Path element1 = createTempDir();
         Path element2 = createTempDir();
 
-        URL expected[] = { element1.toUri().toURL(), element2.toUri().toURL() };
-        assertArrayEquals(expected, JarHell.parseClassPath(element1.toString() + ";" + element2.toString()));
+        Set<URL> expected = asSet(element1.toUri().toURL(), element2.toUri().toURL());
+        assertEquals(expected, JarHell.parseClassPath(element1.toString() + ";" + element2.toString()));
     }
 
     /**
@@ -298,13 +295,13 @@ public class JarHellTests extends ESTestCase {
         assumeTrue("test is designed for windows-like systems only", ";".equals(System.getProperty("path.separator")));
         assumeTrue("test is designed for windows-like systems only", "\\".equals(System.getProperty("file.separator")));
 
-        URL expected[] = {
+        Set<URL> expected = asSet(
             PathUtils.get("c:\\element1").toUri().toURL(),
             PathUtils.get("c:\\element2").toUri().toURL(),
             PathUtils.get("c:\\element3").toUri().toURL(),
-            PathUtils.get("c:\\element 4").toUri().toURL(),
-        };
-        URL actual[] = JarHell.parseClassPath("c:\\element1;c:\\element2;/c:/element3;/c:/element 4");
-        assertArrayEquals(expected, actual);
+            PathUtils.get("c:\\element 4").toUri().toURL()
+        );
+        Set<URL> actual = JarHell.parseClassPath("c:\\element1;c:\\element2;/c:/element3;/c:/element 4");
+        assertEquals(expected, actual);
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index 98da8239775..7ec8f41034f 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -439,6 +439,16 @@ public class ScopedSettingsTests extends ESTestCase {
         clusterSettings2.validate(settings);
     }
 
+    public void
testDiffSecureSettings() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("some.secure.setting", "secret"); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, + Collections.singleton(SecureSetting.secureString("some.secure.setting", null, false))); + + Settings diffed = clusterSettings.diff(Settings.EMPTY, settings); + assertTrue(diffed.isEmpty()); + } public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) diff --git a/core/src/test/java/org/elasticsearch/common/settings/SecureStringTests.java b/core/src/test/java/org/elasticsearch/common/settings/SecureStringTests.java new file mode 100644 index 00000000000..4f9ed8ed4b9 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/settings/SecureStringTests.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.common.settings;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Arrays;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.sameInstance;
+
+public class SecureStringTests extends ESTestCase {
+
+    public void testCloseableCharsDoesNotModifySecureString() {
+        final char[] password = randomAsciiOfLengthBetween(1, 32).toCharArray();
+        SecureString secureString = new SecureString(password);
+        assertSecureStringEqualToChars(password, secureString);
+        try (SecureString copy = secureString.clone()) {
+            assertArrayEquals(password, copy.getChars());
+            assertThat(copy.getChars(), not(sameInstance(password)));
+        }
+        assertSecureStringEqualToChars(password, secureString);
+    }
+
+    public void testClosingSecureStringDoesNotModifyCloseableChars() {
+        final char[] password = randomAsciiOfLengthBetween(1, 32).toCharArray();
+        SecureString secureString = new SecureString(password);
+        assertSecureStringEqualToChars(password, secureString);
+        SecureString copy = secureString.clone();
+        assertArrayEquals(password, copy.getChars());
+        assertThat(copy.getChars(), not(sameInstance(password)));
+        final char[] passwordCopy = Arrays.copyOf(password, password.length);
+        assertArrayEquals(password, passwordCopy);
+        secureString.close();
+        assertNotEquals(password[0], passwordCopy[0]);
+        assertArrayEquals(passwordCopy, copy.getChars());
+    }
+
+    public void testClosingChars() {
+        final char[] password = randomAsciiOfLengthBetween(1, 32).toCharArray();
+        SecureString secureString = new SecureString(password);
+        assertSecureStringEqualToChars(password, secureString);
+        SecureString copy = secureString.clone();
+        assertArrayEquals(password, copy.getChars());
+        assertThat(copy.getChars(), not(sameInstance(password)));
+        copy.close();
+        if (randomBoolean()) {
+            // close another time and no exception is thrown
+            copy.close();
+        }
+        IllegalStateException e = expectThrows(IllegalStateException.class, copy::getChars);
+        assertThat(e.getMessage(), containsString("already been closed"));
+    }
+
+    public void testGetCloseableCharsAfterSecureStringClosed() {
+        final char[] password = randomAsciiOfLengthBetween(1, 32).toCharArray();
+        SecureString secureString = new SecureString(password);
+        assertSecureStringEqualToChars(password, secureString);
+        secureString.close();
+        if (randomBoolean()) {
+            // close another time and no exception is thrown
+            secureString.close();
+        }
+        IllegalStateException e = expectThrows(IllegalStateException.class, secureString::clone);
+        assertThat(e.getMessage(), containsString("already been closed"));
+    }
+
+    private void assertSecureStringEqualToChars(char[] expected, SecureString secureString) {
+        int pos = 0;
+        for (int i : secureString.chars().toArray()) {
+            if (pos >= expected.length) {
+                fail("Index " + pos + " greater than or equal to array length " + expected.length);
+            } else {
+                assertEquals(expected[pos++], (char) i);
+            }
+        }
+    }
+}
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
index 8eeced6cfcd..2beac7eef2f 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
@@ -33,6 +33,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.BoundTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.BigArrays;
@@ -61,6 +62,7 @@
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@@ -423,7 +425,16 @@ public class UnicastZenPingTests extends ESTestCase {
             new NoneCircuitBreakerService(),
             new NamedWriteableRegistry(Collections.emptyList()),
             networkService,
-            Version.CURRENT);
+            Version.CURRENT) {
+
+            @Override
+            public BoundTransportAddress boundAddress() {
+                return new BoundTransportAddress(
+                    new TransportAddress[]{new TransportAddress(InetAddress.getLoopbackAddress(), 9500)},
+                    new TransportAddress(InetAddress.getLoopbackAddress(), 9500)
+                );
+            }
+        };
         closeables.push(transport);
         final TransportService transportService = new TransportService(Settings.EMPTY, transport, threadPool,
             TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null);
@@ -446,6 +457,50 @@
         assertThat(ports, equalTo(IntStream.range(9300, 9300 + limitPortCounts).mapToObj(m -> m).collect(Collectors.toSet())));
     }
 
+    public void testRemovingLocalAddresses() throws InterruptedException {
+        final NetworkService networkService = new NetworkService(Settings.EMPTY, Collections.emptyList());
+        final InetAddress loopbackAddress = InetAddress.getLoopbackAddress();
+        final Transport transport = new MockTcpTransport(
+            Settings.EMPTY,
+            threadPool,
+            BigArrays.NON_RECYCLING_INSTANCE,
+            new NoneCircuitBreakerService(),
+            new NamedWriteableRegistry(Collections.emptyList()),
+            networkService,
+            Version.CURRENT) {
+
+            @Override
+            public BoundTransportAddress boundAddress() {
+                return new BoundTransportAddress(
+                    new TransportAddress[]{
+                        new TransportAddress(loopbackAddress, 9300),
+                        new TransportAddress(loopbackAddress, 9301)
+                    },
+                    new TransportAddress(loopbackAddress, 9302)
+                );
+            }
+        };
+        closeables.push(transport);
+        final TransportService transportService =
+            new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null);
+        closeables.push(transportService);
+        final List<DiscoveryNode> discoveryNodes = TestUnicastZenPing.resolveHostsLists(
+            executorService,
+            logger,
+            Collections.singletonList(NetworkAddress.format(loopbackAddress)),
+            10,
+            transportService,
+            "test_",
+            TimeValue.timeValueSeconds(1));
+        assertThat(discoveryNodes, hasSize(7));
+        final Set<Integer> ports = new HashSet<>();
+        for (final DiscoveryNode discoveryNode : discoveryNodes) {
+            assertTrue(discoveryNode.getAddress().address().getAddress().isLoopbackAddress());
+            ports.add(discoveryNode.getAddress().getPort());
+        }
+        assertThat(ports, equalTo(IntStream.range(9303, 9310).mapToObj(m -> m).collect(Collectors.toSet())));
+    }
+
     public void testUnknownHost() throws InterruptedException {
         final Logger logger = mock(Logger.class);
         final NetworkService networkService = new NetworkService(Settings.EMPTY, Collections.emptyList());
@@ -460,6 +515,14 @@
             networkService,
             Version.CURRENT) {
 
+            @Override
+            public BoundTransportAddress boundAddress() {
+                return new BoundTransportAddress(
+                    new TransportAddress[]{new TransportAddress(InetAddress.getLoopbackAddress(),
9300)}, + new TransportAddress(InetAddress.getLoopbackAddress(), 9300) + ); + } + @Override public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { throw unknownHostException; @@ -499,6 +562,14 @@ public class UnicastZenPingTests extends ESTestCase { networkService, Version.CURRENT) { + @Override + public BoundTransportAddress boundAddress() { + return new BoundTransportAddress( + new TransportAddress[]{new TransportAddress(InetAddress.getLoopbackAddress(), 9500)}, + new TransportAddress(InetAddress.getLoopbackAddress(), 9500) + ); + } + @Override public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { if ("hostname1".equals(address)) { @@ -703,7 +774,15 @@ public class UnicastZenPingTests extends ESTestCase { new NoneCircuitBreakerService(), new NamedWriteableRegistry(Collections.emptyList()), networkService, - Version.CURRENT); + Version.CURRENT) { + @Override + public BoundTransportAddress boundAddress() { + return new BoundTransportAddress( + new TransportAddress[]{new TransportAddress(InetAddress.getLoopbackAddress(), 9300)}, + new TransportAddress(InetAddress.getLoopbackAddress(), 9300) + ); + } + }; closeables.push(transport); final TransportService transportService = diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java index 5231ccfc380..70c1486fe1f 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java @@ -27,8 +27,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; import java.util.List; import static org.hamcrest.Matchers.equalTo; @@ -56,16 +58,17 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase { bytesList1.add(randomBytes()); bytesList1.add(randomBytes()); XContentBuilder doc = XContentFactory.jsonBuilder().startObject().startArray("field").value(bytesList1.get(0)).value(bytesList1.get(1)).endArray().endObject(); - ParsedDocument d = mapper.parse("test", "test", "1", doc.bytes()); + ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", "1", + doc.bytes(), XContentType.JSON)); writer.addDocument(d.rootDoc()); byte[] bytes1 = randomBytes(); doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1).endObject(); - d = mapper.parse("test", "test", "2", doc.bytes()); + d = mapper.parse(SourceToParse.source("test", "test", "2", doc.bytes(), XContentType.JSON)); writer.addDocument(d.rootDoc()); doc = XContentFactory.jsonBuilder().startObject().endObject(); - d = mapper.parse("test", "test", "3", doc.bytes()); + d = mapper.parse(SourceToParse.source("test", "test", "3", doc.bytes(), XContentType.JSON)); writer.addDocument(d.rootDoc()); // test remove duplicate value @@ -73,7 +76,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase { bytesList2.add(randomBytes()); bytesList2.add(randomBytes()); doc = 
 XContentFactory.jsonBuilder().startObject().startArray("field").value(bytesList2.get(0)).value(bytesList2.get(1)).value(bytesList2.get(0)).endArray().endObject();
-        d = mapper.parse("test", "test", "4", doc.bytes());
+        d = mapper.parse(SourceToParse.source("test", "test", "4", doc.bytes(), XContentType.JSON));
         writer.addDocument(d.rootDoc());
 
         List<LeafReaderContext> readers = refreshReader();
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java
index 2243c1182bd..fac6e4c84b1 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java
@@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
@@ -92,7 +93,9 @@ public class BinaryFieldMapperTests extends ESSingleNodeTestCase {
         assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2)));
 
         for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) {
-            ParsedDocument doc = mapper.parse("test", "type", "id", XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes());
+            ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "id",
+                XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes(),
+                XContentType.JSON));
             BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
             assertEquals(new BytesRef(value), indexedValue);
             FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
index 2486f91ccd3..9c672924317 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java
@@ -39,6 +39,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.plugins.Plugin;
@@ -80,11 +81,12 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
 
         DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", true)
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
 
         try (Directory dir = new RAMDirectory();
              IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())))) {
@@ -150,12 +152,13 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
         String falsy = randomFrom("false", "off", "no", "0");
         String truthy =
randomFrom("true", "on", "yes", "1"); - ParsedDocument parsedDoc = defaultMapper.parse("legacy", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("legacy", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field1", falsy) .field("field2", truthy) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); Document doc = parsedDoc.rootDoc(); assertEquals("F", doc.getField("field1").stringValue()); assertEquals("T", doc.getField("field2").stringValue()); @@ -190,7 +193,8 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { // omit "false"/"true" here as they should still be parsed correctly .field("field", randomFrom("off", "no", "0", "on", "yes", "1")) .endObject().bytes(); - MapperParsingException ex = expectThrows(MapperParsingException.class, () -> defaultMapper.parse("test", "type", "1", source)); + MapperParsingException ex = expectThrows(MapperParsingException.class, + () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON))); assertEquals("failed to parse [field]", ex.getMessage()); } @@ -213,7 +217,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { .startObject() .field("field", false) .endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "1", source); + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); assertNotNull(doc.rootDoc().getField("field.as_string")); } @@ -236,13 +240,14 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("bool1", true) .field("bool2", true) .field("bool3", true) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); Document doc = parsedDoc.rootDoc(); IndexableField[] fields = doc.getFields("bool1"); assertEquals(2, fields.length); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java index 6e841509c0d..ac14f2905cf 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java @@ -35,9 +35,10 @@ public class CamelCaseFieldNameTests extends ESSingleNodeTestCase { client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper documentMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() .field("thisIsCamelCase", "value1") - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 
802aee7f482..5da524b69c0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -152,11 +153,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .field("completion", "suggestion") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 1); } @@ -171,11 +173,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .array("completion", "suggestion1", "suggestion2") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 2); } @@ -190,14 +193,15 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .startObject("completion") .field("input", "suggestion") .field("weight", 2) .endObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 1); } @@ -212,14 +216,15 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); 
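         // three inputs sharing a single weight should produce one suggest field per input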
MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .startObject("completion") .array("input", "suggestion1", "suggestion2", "suggestion3") .field("weight", 2) .endObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 3); } @@ -234,7 +239,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -251,7 +256,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 3); } @@ -266,7 +272,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -283,7 +289,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 6); } @@ -297,7 +304,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); try { - defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field1") .field("input", "suggestion1") @@ -307,7 +314,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .field("weight", 3) .endObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); fail("Supplying contexts to a non context-enabled field should error"); } catch (MapperParsingException e) { assertThat(e.getRootCause().getMessage(), containsString("field1")); @@ -326,11 +334,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { charsRefBuilder.append("sugg"); charsRefBuilder.setCharAt(2, '\u001F'); try { - 
defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .field("completion", charsRefBuilder.get().toString()) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); fail("No error indexing value with reserved character [0x1F]"); } catch (MapperParsingException e) { Throwable cause = e.unwrapCause().getCause(); @@ -340,11 +349,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { charsRefBuilder.setCharAt(2, '\u0000'); try { - defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .field("completion", charsRefBuilder.get().toString()) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); fail("No error indexing value with reserved character [0x0]"); } catch (MapperParsingException e) { Throwable cause = e.unwrapCause().getCause(); @@ -354,11 +364,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { charsRefBuilder.setCharAt(2, '\u001E'); try { - defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder() + defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() .startObject() .field("completion", charsRefBuilder.get().toString()) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); fail("No error indexing value with reserved character [0x1E]"); } catch (MapperParsingException e) { Throwable cause = e.unwrapCause().getCause(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java index 85fddfc8001..4b2f629c36e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java @@ -94,7 +94,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { .field("int_to_str_test", 42) .endObject().bytes(); - ParsedDocument parsedDoc = docMapper.parse("test", "type1", "1", json); + ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)); ParseContext.Document doc = parsedDoc.rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(2)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -149,7 +149,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { .startObject("foo").startObject("bar").field("baz", "zoo").endObject().endObject() .endObject().bytes(); - ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc(); + ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -175,7 +176,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { .field("new_field", "bar") .endObject().bytes(); - ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc(); + ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -211,7 +213,8 @@ public class CopyToMapperTests extends 
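[Editor's note — not part of the patch. The suggestion documents fed to parse() above are built with XContentFactory; .bytes() returns raw JSON with no type information attached, which is why XContentType.JSON now has to ride along. A sketch of such a builder, assuming jsonBuilder()'s checked IOException; the class and method names are illustrative:]

```java
import java.io.IOException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;

class CompletionSourceSketch {
    // Builds {"completion": "suggestion"} as raw bytes; the content type is
    // no longer derivable from the BytesReference alone.
    static BytesReference singleSuggestion() throws IOException {
        return XContentFactory.jsonBuilder()
                .startObject()
                .field("completion", "suggestion")
                .endObject()
                .bytes();
    }
}
```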
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java
index 85fddfc8001..4b2f629c36e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java
@@ -94,7 +94,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
             .field("int_to_str_test", 42)
             .endObject().bytes();
-        ParsedDocument parsedDoc = docMapper.parse("test", "type1", "1", json);
+        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON));
         ParseContext.Document doc = parsedDoc.rootDoc();
         assertThat(doc.getFields("copy_test").length, equalTo(2));
         assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -149,7 +149,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
             .startObject("foo").startObject("bar").field("baz", "zoo").endObject().endObject()
             .endObject().bytes();
-        ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc();
+        ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json,
+            XContentType.JSON)).rootDoc();
         assertThat(doc.getFields("copy_test").length, equalTo(1));
         assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -175,7 +176,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
             .field("new_field", "bar")
             .endObject().bytes();
-        ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc();
+        ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json,
+            XContentType.JSON)).rootDoc();
         assertThat(doc.getFields("copy_test").length, equalTo(1));
         assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -211,7 +213,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
             .field("new_field", "bar")
             .endObject().bytes();
-        ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc();
+        ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json,
+            XContentType.JSON)).rootDoc();
         assertThat(doc.getFields("copy_test").length, equalTo(1));
         assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -240,7 +243,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
             .endObject().bytes();
         try {
-            docMapper.parse("test", "type1", "1", json).rootDoc();
+            docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc();
             fail();
         } catch (MapperParsingException ex) {
             assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [type1] is not allowed"));
@@ -274,7 +277,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
             .endObject().bytes();
         try {
-            docMapper.parse("test", "type1", "1", json).rootDoc();
+            docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc();
             fail();
         } catch (MapperParsingException ex) {
             assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));
@@ -377,7 +380,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
             .endArray()
             .endObject();
-        ParsedDocument doc = mapper.parse("test", "type", "1", jsonDoc.bytes());
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", jsonDoc.bytes(), XContentType.JSON));
         assertEquals(6, doc.docs().size());
         Document nested = doc.docs().get(0);
@@ -439,7 +442,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
             .endObject().bytes();
         try {
-            docMapper.parse("test", "type1", "1", json).rootDoc();
+            docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc();
             fail();
         } catch (MapperParsingException ex) {
             assertThat(ex.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`"));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
index 300203b1a35..24bfc930306 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -62,11 +63,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(mapping, mapper.mappingSource().toString());
-        ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "2016-03-11")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(2, fields.length);
@@ -90,11 +92,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(mapping, mapper.mappingSource().toString());
-        ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "2016-03-11")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(1, fields.length);
@@ -111,11 +114,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(mapping, mapper.mappingSource().toString());
-        ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "2016-03-11")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(1, fields.length);
@@ -132,11 +136,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(mapping, mapper.mappingSource().toString());
-        ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "2016-03-11")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(3, fields.length);
@@ -158,11 +163,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(mapping, mapper.mappingSource().toString());
-        ThrowingRunnable runnable = () -> mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "2016-03-99")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
         assertThat(e.getCause().getMessage(), containsString("Cannot parse \"2016-03-99\""));
@@ -173,11 +179,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
-        ParsedDocument doc = mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", ":1")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(0, fields.length);
@@ -193,11 +200,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(mapping, mapper.mappingSource().toString());
-        ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", 1457654400)
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(2, fields.length);
@@ -214,11 +222,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(mapping, mapper.mappingSource().toString());
-        mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", 1457654400)
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
     }
     public void testNullValue() throws IOException {
@@ -234,11 +243,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
-        ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
         mapping = XContentFactory.jsonBuilder().startObject()
@@ -254,11 +264,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
-        doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(2, fields.length);
         IndexableField pointField = fields[0];
@@ -322,11 +333,12 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
         final DateTimeZone randomTimeZone = randomBoolean() ? DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone();
         final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone);
-        ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate))
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(2, fields.length);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java
index a9b1753ba1c..e2fbbe7ebfe 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java
@@ -23,6 +23,7 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.mapper.DocumentFieldMappers;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -167,7 +168,11 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase {
                 barrier.await();
                 for (int i = 0; i < 200 && stopped.get() == false; i++) {
                     final String fieldName = Integer.toString(i);
-                    ParsedDocument doc = documentMapper.parse("test", "test", fieldName, new BytesArray("{ \"" + fieldName + "\" : \"test\" }"));
+                    ParsedDocument doc = documentMapper.parse(SourceToParse.source("test",
+                        "test",
+                        fieldName,
+                        new BytesArray("{ \"" + fieldName + "\" : \"test\" }"),
+                        XContentType.JSON));
                     Mapping update = doc.dynamicMappingsUpdate();
                     assert update != null;
                     lastIntroducedFieldName.set(fieldName);
@@ -186,7 +191,11 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase {
                 while(stopped.get() == false) {
                     final String fieldName = lastIntroducedFieldName.get();
                     final BytesReference source = new BytesArray("{ \"" + fieldName + "\" : \"test\" }");
-                    ParsedDocument parsedDoc = documentMapper.parse("test", "test", "random", source);
+                    ParsedDocument parsedDoc = documentMapper.parse(SourceToParse.source("test",
+                        "test",
+                        "random",
+                        source,
+                        XContentType.JSON));
                     if (parsedDoc.dynamicMappingsUpdate() != null) {
                         // not in the mapping yet, try again
                         continue;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
index db748143576..cd4d1d21c6c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
@@ -71,7 +71,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo")
             .field("field", "1234")
             .endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNull(doc.rootDoc().getField("field"));
         assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME));
     }
@@ -89,7 +89,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .field("foo", "1234")
             .field("bar", 10)
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNull(doc.rootDoc().getField("foo"));
         assertNotNull(doc.rootDoc().getField("bar"));
         assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME));
@@ -114,7 +114,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
                 .field("baz", 789)
             .endObject()
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNull(doc.dynamicMappingsUpdate()); // no update!
         String[] values = doc.rootDoc().getValues("foo.bar.baz");
         assertEquals(3, values.length);
@@ -136,7 +136,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .field("foo.bar", 123)
             .endObject().bytes();
         MapperParsingException e = expectThrows(MapperParsingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals(
             "Cannot add a value for field [foo.bar] since one of the intermediate objects is mapped as a nested object: [foo]",
             e.getMessage());
@@ -162,7 +162,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .field("foo.bar",42)
             .endObject().bytes();
         MapperParsingException e = expectThrows(MapperParsingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals(
             "It is forbidden to create dynamic nested objects ([foo]) through `copy_to` or dots in field names",
             e.getMessage());
@@ -183,7 +183,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo")
             .field("bar", "something")
             .endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         assertNotNull(doc.rootDoc().getField("foo.bar"));
     }
@@ -203,7 +203,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo").startObject("bar")
             .field("baz", "something")
             .endObject().endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         assertNotNull(doc.rootDoc().getField("foo.bar.baz"));
     }
@@ -222,7 +222,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo")
             .field("bar", "something")
             .endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNull(doc.dynamicMappingsUpdate());
         assertNull(doc.rootDoc().getField("foo.bar"));
     }
@@ -350,7 +350,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startArray().value(0).value(0).endArray()
             .startArray().value(1).value(1).endArray()
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo").length);
     }
@@ -368,7 +368,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo").length);
     }
@@ -383,7 +383,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo").length);
     }
@@ -398,7 +398,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo").length);
     }
@@ -414,7 +414,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(1)
             .endArray().endObject().bytes();
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage());
     }
@@ -430,7 +430,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startArray().value(0).value(0).endArray()
             .startArray().value(1).value(1).endArray()
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo").length);
     }
@@ -446,7 +446,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo").length);
     }
@@ -464,7 +464,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo")
             .field("bar", "baz")
             .endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar").length);
     }
@@ -478,7 +478,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo")
             .field("bar", "baz")
             .endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo.bar").length);
     }
@@ -493,7 +493,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .field("bar", "baz")
             .endObject().endObject().bytes();
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage());
     }
@@ -507,7 +507,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject()
             .field("bar", "baz")
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("bar").length);
     }
@@ -522,7 +522,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .field("bar", "baz")
             .endObject().bytes();
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [bar] within [type] is not allowed", exception.getMessage());
     }
@@ -536,7 +536,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject()
             .field("bar", (String) null)
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("bar").length);
     }
@@ -551,7 +551,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .field("bar", (String) null)
             .endObject().bytes();
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [bar] within [type] is not allowed", exception.getMessage());
     }
@@ -565,7 +565,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder()
             .startObject().field("foo", (Long) null)
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo").length);
     }
@@ -580,7 +580,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -607,7 +607,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -633,7 +633,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -659,7 +659,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        MapperParsingException exception = expectThrows(MapperParsingException.class, () -> mapper.parse("test", "type", "1", bytes));
+        MapperParsingException exception = expectThrows(MapperParsingException.class,
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. " +
             "Existing mapping for [foo] must be of type object but found [long].", exception.getMessage());
     }
@@ -675,7 +676,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(0)
             .value(1)
             .endArray().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo.bar.baz").length);
     }
@@ -691,7 +692,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .value(1)
             .endArray().endObject().bytes();
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage());
     }
@@ -704,7 +705,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder()
             .startObject().field("foo.bar.baz", 0)
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -729,7 +730,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder()
             .startObject().field("foo.bar.baz", 0)
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -753,7 +754,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder()
             .startObject().field("foo.bar.baz", 0)
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -777,7 +778,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder()
             .startObject().field("foo.bar.baz", 0)
             .endObject().bytes();
-        MapperParsingException exception = expectThrows(MapperParsingException.class, () -> mapper.parse("test", "type", "1", bytes));
+        MapperParsingException exception = expectThrows(MapperParsingException.class,
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. " +
             "Existing mapping for [foo] must be of type object but found [long].", exception.getMessage());
     }
@@ -791,7 +793,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder()
             .startObject().field("foo.bar.baz", 0)
             .endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo.bar.baz").length);
     }
@@ -805,7 +807,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().field("foo.bar.baz", 0)
             .endObject().bytes();
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage());
     }
@@ -819,7 +821,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo.bar.baz")
             .field("a", 0)
             .endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -848,7 +850,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo.bar.baz")
             .field("a", 0)
             .endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -872,7 +874,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject()
             .bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
         assertNotNull(fooMapper);
@@ -898,7 +900,9 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject()
             .bytes();
-        MapperParsingException exception = expectThrows(MapperParsingException.class, () -> mapper.parse("test", "type", "1", bytes));
+        MapperParsingException exception = expectThrows(MapperParsingException.class,
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
+
         assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. " +
             "Existing mapping for [foo] must be of type object but found [long].", exception.getMessage());
     }
@@ -913,7 +917,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .startObject().startObject("foo.bar.baz")
             .field("a", 0)
             .endObject().endObject().bytes();
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo.bar.baz.a").length);
     }
@@ -928,7 +932,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .field("a", 0)
             .endObject().endObject().bytes();
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
-            () -> mapper.parse("test", "type", "1", bytes));
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage());
     }
@@ -939,12 +943,11 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference bytes = XContentFactory.jsonBuilder().startObject().field("_ttl", 0).endObject().bytes();
         MapperParsingException e = expectThrows(MapperParsingException.class, () ->
-            mapper.parse("test", "type", "1", bytes)
-        );
+            mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertTrue(e.getMessage(), e.getMessage().contains("cannot be added inside a document"));
         BytesReference bytes2 = XContentFactory.jsonBuilder().startObject().field("foo._ttl", 0).endObject().bytes();
-        mapper.parse("test", "type", "1", bytes2); // parses without error
+        mapper.parse(SourceToParse.source("test", "type", "1", bytes2, XContentType.JSON)); // parses without error
     }
     public void testSimpleMapper() throws Exception {
@@ -955,10 +958,10 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             indexService.mapperService()).build(indexService.mapperService());
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
-        Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
+        Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay"));
-        doc = docMapper.parse("test", "person", "1", json).rootDoc();
+        doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
     }
     public void testParseToJsonAndParse() throws Exception {
@@ -969,7 +972,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         // reparse it
         DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
-        Document doc = builtDocMapper.parse("test", "person", "1", json).rootDoc();
+        Document doc = builtDocMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1")));
         assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay"));
     }
@@ -981,7 +984,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
-        Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
+        Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1")));
         assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay"));
     }
@@ -990,7 +993,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json"));
-        Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
+        Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.get(docMapper.uidMapper().fieldType().name()), equalTo(Uid.createUid("person", "1")));
         assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay"));
     }
@@ -1016,7 +1019,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
         try {
-            docMapper.parse("test", "person", "1", json).rootDoc();
+            docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
             fail("this point is never reached");
         } catch (MapperParsingException e) {
             assertThat(e.getMessage(), equalTo("failed to parse, document is empty"));
@@ -1028,13 +1031,14 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("test1", "value1")
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
         assertThat(doc.rootDoc().get("test2"), equalTo("value2"));
@@ -1046,13 +1050,14 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
                 .field("test1", "value1")
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject().endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
         assertThat(doc.rootDoc().get("type.test2"), equalTo("value2"));
@@ -1064,14 +1069,15 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("type", "value_type")
                 .field("test1", "value1")
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("type"), equalTo("value_type"));
         assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
@@ -1084,14 +1090,15 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
                 .field("type", "value_type")
                 .field("test1", "value1")
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject().endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("type.type"), equalTo("value_type"));
         assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
@@ -1104,14 +1111,15 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .startObject("type").field("type_field", "type_value").endObject()
                 .field("test1", "value1")
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         // in this case, we analyze the type object as the actual document, and ignore the other same level fields
         assertThat(doc.rootDoc().get("type.type_field"), equalTo("type_value"));
@@ -1124,14 +1132,15 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
                 .startObject("type").field("type_field", "type_value").endObject()
                 .field("test1", "value1")
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject().endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("type.type.type_field"), equalTo("type_value"));
         assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
@@ -1144,14 +1153,15 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
                 .field("test1", "value1")
                 .field("test2", "value2")
                 .field("type", "value_type")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject().endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("type.type"), equalTo("value_type"));
         assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
@@ -1164,14 +1174,15 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
                 .field("test1", "value1")
                 .field("type", "value_type")
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject().endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("type.type"), equalTo("value_type"));
         assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
@@ -1184,14 +1195,15 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("test1", "value1")
                 .startObject("type").field("type_field", "type_value").endObject()
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         // when the type is not the first one, we don't confuse it...
         assertThat(doc.rootDoc().get("type.type_field"), equalTo("type_value"));
@@ -1205,14 +1217,15 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(defaultMapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject().startObject("type")
                 .field("test1", "value1")
                 .startObject("type").field("type_field", "type_value").endObject()
                 .field("test2", "value2")
                 .startObject("inner").field("inner_field", "inner_value").endObject()
                 .endObject().endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("type.type.type_field"), equalTo("type_value"));
         assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
@@ -1235,7 +1248,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
         // Even though we matched the dynamic format, we do not match on numbers,
         // which are too likely to be false positives
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         Mapping update = doc.dynamicMappingsUpdate();
         assertNotNull(update);
         Mapper dateMapper = update.root().getMapper("foo");
@@ -1257,7 +1270,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
             .endObject().bytes();
         // We should have generated a date field
-        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         Mapping update = doc.dynamicMappingsUpdate();
         assertNotNull(update);
         Mapper dateMapper = update.root().getMapper("foo");
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java
index 85e186e2f82..2acd6b5c987 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java
@@ -48,7 +48,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
         DocumentMapper mapper = index.mapperService().documentMapper("type");
         QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L);
-        ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field1", "value1")
                 .field("field2", 1)
@@ -56,7 +56,8 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
                 .field("field4", "2010-01-01")
                 .startArray("field5").value(1).value(2).value(3).endArray()
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("type")
             .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
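[Editor's note — not part of the patch. The DynamicMapping tests that follow all run the same round trip: parse a document, pick up the dynamic mapping update it produced, and push that update back through the put-mapping API. A sketch of that loop, assuming a test-fixture Client; the index and type names are illustrative:]

```java
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;

class DynamicUpdateSketch {
    static void parseAndApply(Client client, DocumentMapper mapper, BytesReference source) {
        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
        if (doc.dynamicMappingsUpdate() != null) {
            // apply the dynamically generated mapping so later parses see it
            client.admin().indices().preparePutMapping("test").setType("type")
                    .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
        }
    }
}
```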
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index 344f3debdf6..71ae77aa55e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -58,12 +58,13 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(mapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
                 .startObject()
                 .field("field1", "value1")
                 .field("field2", "value2")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
         assertThat(doc.rootDoc().get("field2"), equalTo("value2"));
@@ -79,12 +80,13 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(mapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
                 .startObject()
                 .field("field1", "value1")
                 .field("field2", "value2")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
         assertThat(doc.rootDoc().get("field2"), nullValue());
@@ -101,20 +103,22 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(mapping));
-        StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse("test", "type", "1", jsonBuilder()
+        StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
                 .startObject()
                 .field("field1", "value1")
                 .field("field2", "value2")
                 .endObject()
-                .bytes()));
+                .bytes(),
+            XContentType.JSON)));
         assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [type] is not allowed"));
-        e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field1", "value1")
                 .field("field2", (String) null)
                 .endObject()
-                .bytes()));
+                .bytes(),
+            XContentType.JSON)));
         assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [type] is not allowed"));
     }
@@ -130,13 +134,14 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(mapping));
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
                 .startObject().startObject("obj1")
                 .field("field1", "value1")
                 .field("field2", "value2")
                 .endObject()
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertThat(doc.rootDoc().get("obj1.field1"), equalTo("value1"));
         assertThat(doc.rootDoc().get("obj1.field2"), nullValue());
@@ -155,13 +160,14 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
            new CompressedXContent(mapping));
         StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () ->
-            defaultMapper.parse("test", "type", "1", jsonBuilder()
+            defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
                 .startObject().startObject("obj1")
                 .field("field1", "value1")
                 .field("field2", "value2")
                 .endObject()
                 .endObject()
-                .bytes()));
+                .bytes(),
+            XContentType.JSON)));
         assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [obj1] is not allowed"));
     }
@@ -585,7 +591,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
             .field("baz", (double) 3.2f) // double that can be accurately represented as a float
             .field("quux", "3.2") // float detected through numeric detection
             .endObject().bytes();
-        ParsedDocument parsedDocument = mapper.parse("index", "type", "id", source);
+        ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id", source,
+            XContentType.JSON));
         Mapping update = parsedDocument.dynamicMappingsUpdate();
         assertNotNull(update);
         assertThat(((FieldMapper) update.root().getMapper("foo")).fieldType().typeName(), equalTo("float"));
@@ -603,12 +610,13 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
         DocumentMapper defaultMapper = index.mapperService().documentMapper("type");
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("s_long", "100")
                 .field("s_double", "100.0")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("type")
             .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
@@ -629,12 +637,13 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
         DocumentMapper defaultMapper = index.mapperService().documentMapper("type");
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("s_long", "100")
                 .field("s_double", "100.0")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         assertAcked(client().admin().indices().preparePutMapping("test").setType("type")
             .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get());
@@ -677,13 +686,14 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
         DocumentMapper defaultMapper = index.mapperService().documentMapper("type");
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("date1", "2016-11-20")
                 .field("date2", "2016-11-20")
                 .field("date3", "2016-11-20")
                 .endObject()
-                .bytes());
+                .bytes(),
+            XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         assertAcked(client().admin().indices().preparePutMapping("test").setType("type")
             .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get());
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
index 437dd7cb99d..70cc2c08441 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
@@ -51,7 +51,8 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase {
         DocumentMapper docMapper = index.mapperService().documentMapper("person");
         builder = JsonXContent.contentBuilder();
         builder.startObject().field("s", "hello").field("l", 1).endObject();
-        ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", builder.bytes());
+        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", builder.bytes(),
+            XContentType.JSON));
         client().admin().indices().preparePutMapping("test").setType("person")
             .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
@@ -73,7 +74,8 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase {
         client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get();
         DocumentMapper docMapper = index.mapperService().documentMapper("person");
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json");
-        ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json));
+        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json),
+            XContentType.JSON));
         client().admin().indices().preparePutMapping("test").setType("person")
             .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
         docMapper = index.mapperService().documentMapper("person");
@@ -131,7 +133,8 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase {
         client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get();
         DocumentMapper docMapper = index.mapperService().documentMapper("person");
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json");
-        ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json));
+        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json),
+            XContentType.JSON));
         client().admin().indices().preparePutMapping("test").setType("person")
             .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
         docMapper = index.mapperService().documentMapper("person");
org.elasticsearch.indices.mapper.MapperRegistry; @@ -76,11 +77,12 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string() )); - ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("field.bool"), notNullValue()); assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); @@ -136,11 +138,12 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject() .string())); - ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("field.bool"), notNullValue()); assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); @@ -204,11 +207,12 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject() .string())); - ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("field.bool"), notNullValue()); assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index dc2de3e3dd9..aa66526bf42 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.IndicesModule; @@ -88,14 +89,15 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("a", "100") .startObject("b") .field("c", 42) .endObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertFieldNames(set("a", "a.keyword", "b", "b.c", "_uid", "_type", "_version", "_seq_no", "_primary_term", "_source"), doc); } @@ -108,11 +110,12 @@ public class FieldNamesFieldMapperTests extends 
ESSingleNodeTestCase { FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertTrue(fieldNamesMapper.fieldType().isEnabled()); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertFieldNames(set("field", "field.keyword", "_uid", "_type", "_version", "_seq_no", "_primary_term", "_source"), doc); } @@ -125,11 +128,12 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertFalse(fieldNamesMapper.fieldType().isEnabled()); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertNull(doc.rootDoc().get("_field_names")); } @@ -244,7 +248,8 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { queryShardContext); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = mapper.parse("index", "type", "id", new BytesArray("{}")); + ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id", new BytesArray("{}"), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(FieldNamesFieldMapper.NAME); boolean found = false; for (IndexableField f : fields) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java index 8afe07a6e68..57a6173bc65 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java @@ -40,7 +40,8 @@ public class GenericStoreDynamicTemplateTests extends ESSingleNodeTestCase { client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); DocumentMapper docMapper = index.mapperService().documentMapper("person"); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); + ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), + XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); docMapper = index.mapperService().documentMapper("person"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index 1b4602ef7f2..f8775073e21 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -55,11 +55,12 @@ public class 
GeoPointFieldMapperTests extends ESSingleNodeTestCase { String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("point", stringEncode(1.3, 1.2)) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } @@ -70,11 +71,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } @@ -85,14 +87,15 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startArray("point") .startObject().field("lat", 1.2).field("lon", 1.3).endObject() .startObject().field("lat", 1.4).field("lon", 1.5).endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); // doc values are enabled by default, but in this test we disable them; we should only have 2 points assertThat(doc.rootDoc().getFields("point"), notNullValue()); @@ -106,11 +109,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("point", "1.2,1.3") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } @@ -122,11 +126,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("point", "1.2,1.3") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); 
assertThat(doc.rootDoc().getField("point"), notNullValue()); } @@ -137,14 +142,15 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startArray("point") .value("1.2,1.3") .value("1.4,1.5") .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); // doc values are enabled by default, but in this test we disable them; we should only have 2 points assertThat(doc.rootDoc().getFields("point"), notNullValue()); @@ -157,11 +163,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startArray("point").value(1.3).value(1.2).endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } @@ -173,11 +180,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { String mapping = xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startArray("point").value(1.3).value(1.2).endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } @@ -188,11 +196,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startArray("point").value(1.3).value(1.2).endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); assertThat(doc.rootDoc().getFields("point").length, equalTo(3)); @@ -205,14 +214,15 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() 
.startObject() .startArray("point") .startArray().value(1.3).value(1.2).endArray() .startArray().value(1.5).value(1.4).endArray() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getFields("point"), notNullValue()); assertThat(doc.rootDoc().getFields("point").length, CoreMatchers.equalTo(4)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java index b7ad6a7e4c3..3c12d18b128 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java @@ -40,10 +40,11 @@ public class IdFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().get(UidFieldMapper.NAME), notNullValue()); assertThat(doc.rootDoc().get(IdFieldMapper.NAME), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java index f0a0b818f9d..910fa0f74fa 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; @@ -50,11 +51,12 @@ public class IndexFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().get("_index"), nullValue()); assertThat(doc.rootDoc().get("field"), equalTo("value")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index 9878267d353..88db0b1b274 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -65,11 +66,12 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "::1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -93,11 +95,12 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "::1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -114,11 +117,12 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "::1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -136,11 +140,12 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "::1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(3, fields.length); @@ -163,11 +168,12 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", ":1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("':1' is not an IP string literal")); @@ -177,11 +183,12 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", ":1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); @@ -200,11 +207,12 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper = 
parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("field") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); mapping = XContentFactory.jsonBuilder().startObject() @@ -220,11 +228,12 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("field") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); IndexableField pointField = fields[0]; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java index f43bf73a3d7..c17df90b5a2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext.Document; @@ -45,7 +46,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); BytesReference json = XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject().bytes(); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); + Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); @@ -62,7 +63,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.not_indexed2"), nullValue()); assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); - doc = docMapper.parse("test", "person", "1", json).rootDoc(); + doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); @@ -101,7 +102,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); BytesReference json = XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject().bytes(); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); + Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); 
assertThat(f, notNullValue()); f = doc.getField("name.indexed"); @@ -119,7 +120,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.not_indexed2"), nullValue()); assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); - doc = docMapper.parse("test", "person", "1", json).rootDoc(); + doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index bffe58db3a6..2da44d57f00 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.plugins.Plugin; @@ -70,11 +71,12 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -106,20 +108,22 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "elk") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); - doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "elasticsearch") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); @@ -133,11 +137,12 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("field") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -148,19 +153,21 @@ public class KeywordFieldMapperTests extends 
ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); - doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("field") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -176,11 +183,12 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -196,11 +204,12 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -217,11 +226,12 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -238,11 +248,12 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -278,11 +289,12 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -299,11 +311,12 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, 
mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "AbC") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 5c6ffb70c73..0a6a8f8d469 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -38,6 +38,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.function.Function; @@ -189,6 +190,22 @@ public class MapperServiceTests extends ESSingleNodeTestCase { assertThat(e.getMessage(), startsWith("Failed to parse mapping [type1]: ")); } + public void testMergeParentTypesSame() { + // Verifies that a merge (absent a DocumentMapper change) + // doesn't change the parentTypes reference. + // The collection was being rewrapped with each merge + // in v5.2, resulting in eventual StackOverflowErrors. + // https://github.com/elastic/elasticsearch/issues/23604 + + IndexService indexService1 = createIndex("index1"); + MapperService mapperService = indexService1.mapperService(); + Set<String> parentTypes = mapperService.getParentTypes(); + + Map<String, Map<String, Object>> mappings = new HashMap<>(); + mapperService.merge(mappings, MergeReason.MAPPING_UPDATE, false); + assertSame(parentTypes, mapperService.getParentTypes()); + } + public void testOtherDocumentMappersOnlyUpdatedWhenChangingFieldType() throws IOException { IndexService indexService = createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index 8f17b3e0e0d..e66e0532737 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -62,7 +63,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { private void testMultiField(String mapping) throws Exception { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); + Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); @@ -146,7 +147,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); + Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index ae306009f25..91cf8fdde85 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ObjectMapper.Dynamic; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -42,21 +43,23 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .nullField("nested1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.docs().size(), equalTo(1)); - doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .startArray("nested").endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.docs().size(), equalTo(1)); } @@ -72,12 +75,13 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); assertThat(nested1Mapper.nested().isNested(), equalTo(true)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .startObject("nested1").field("field1", "1").field("field2", "2").endObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.docs().size(), equalTo(2)); assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString())); @@ -87,7 +91,7 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { assertThat(doc.docs().get(1).get("field"), equalTo("value")); - doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .startArray("nested1") @@ -95,7 +99,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { .startObject().field("field1", "3").field("field2", "4").endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); 
assertThat(doc.docs().size(), equalTo(3)); assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString())); @@ -127,7 +132,7 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .startArray("nested1") @@ -135,7 +140,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6")); @@ -178,7 +184,7 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .startArray("nested1") @@ -186,7 +192,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6")); @@ -229,7 +236,7 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .startArray("nested1") @@ -237,7 +244,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6")); @@ -280,7 +288,7 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(true)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .startArray("nested1") @@ -288,7 +296,8 @@ public class NestedObjectMapperTests extends 
ESSingleNodeTestCase { .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6")); @@ -326,7 +335,7 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { assertThat(nested1Mapper.nested().isNested(), equalTo(true)); assertThat(nested1Mapper.dynamic(), equalTo(Dynamic.STRICT)); - ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "value") .startArray("nested1") @@ -334,7 +343,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase { .startObject().field("field1", "4").endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.docs().size(), equalTo(3)); assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("4")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java index d48fc3c0b6c..8a46f24998d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -37,30 +38,33 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("obj1").endObject() .field("value1", "test1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().get("value1"), equalTo("test1")); - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("obj1") .field("value1", "test1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().get("value1"), equalTo("test1")); - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("obj1").field("field", "value").endObject() .field("value1", "test1") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().get("obj1.field"), equalTo("value")); assertThat(doc.rootDoc().get("value1"), equalTo("test1")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index de3a15f865c..871d62d8bd6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.Arrays; @@ -47,11 +48,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", 123) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -74,11 +76,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", 123) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -96,11 +99,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", 123) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -119,11 +123,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", 123) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(3, fields.length); @@ -147,11 +152,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "123") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -169,11 +175,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping, mapper2.mappingSource().toString()); - ThrowingRunnable runnable = () -> 
mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "123") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("passed as String")); } @@ -193,11 +200,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "a") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("For input string: \"a\"")); @@ -207,11 +215,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "a") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); @@ -248,11 +257,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("field") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); Object missing; @@ -274,11 +284,12 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("field") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); IndexableField pointField = fields[0]; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index e974a02943b..0e1bead1114 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; @@ -48,7 +49,7 @@ public class ObjectMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - defaultMapper.parse("test", "type", "1", new BytesArray(" {\n" + + defaultMapper.parse(SourceToParse.source("test", "type", "1", new BytesArray(" {\n" + " \"object\": {\n" + " \"array\":[\n" + " {\n" + @@ -60,7 +61,8 @@ public class ObjectMapperTests extends ESSingleNodeTestCase { " ]\n" + " },\n" + " \"value\":\"value\"\n" + - " }")); + " }"), + XContentType.JSON)); }); assertTrue(e.getMessage(), e.getMessage().contains("different type")); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java index 7728e09c732..3ad53513c51 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java @@ -40,7 +40,8 @@ public class PathMatchDynamicTemplateTests extends ESSingleNodeTestCase { client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); DocumentMapper docMapper = index.mapperService().documentMapper("person"); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json)); + ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), + XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); docMapper = index.mapperService().documentMapper("person"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java index c065888f911..18a771bb467 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.Arrays; @@ -95,13 +96,14 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); assertEquals(mapping.string(), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); IndexableField[] 
fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -122,13 +124,14 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); assertEquals(mapping.string(), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); @@ -145,13 +148,14 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); assertEquals(mapping.string(), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -170,13 +174,14 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); assertEquals(mapping.string(), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -199,13 +204,14 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping.string(), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -219,13 +225,14 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping.string(), mapper2.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), "5.2") .field(getToField(), "10") .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); MapperParsingException e = 
expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"), containsString("failed to parse date"))); } @@ -243,26 +250,28 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping.string(), mapper.mappingSource().toString()); // test null value for min and max - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .nullField(getFromField()) .nullField(getToField()) .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("field").length); IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField storedField = fields[1]; assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? Long.MAX_VALUE+"" : getMax(type)+"")); // test null max value - doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .nullField(getToField()) .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -293,11 +302,12 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { assertEquals(mapping.string(), mapper.mappingSource().toString()); // test no bounds specified - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .startObject("field") .endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java index 9c26a9806e3..fb98f42f105 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java @@ -41,7 +41,8 @@ public class RoutingFieldMapperTests extends ESSingleNodeTestCase { .startObject() .field("field", "value") .endObject() - .bytes(), XContentType.JSON).routing("routing_value")); + .bytes(), + XContentType.JSON).routing("routing_value")); assertThat(doc.rootDoc().get("_routing"), equalTo("routing_value")); assertThat(doc.rootDoc().get("field"), equalTo("value")); @@ -52,8 +53,8 @@ public class RoutingFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { - docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_routing", "foo").endObject().bytes()); + docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + .startObject().field("_routing", "foo").endObject().bytes(),XContentType.JSON)); fail("Expected failure to parse metadata field"); } catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("Field [_routing] is a metadata 
field and cannot be added inside a document")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java index 4163378db01..c2d0317ea07 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -60,11 +61,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", 123) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -109,11 +111,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", 123) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -132,11 +135,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", 123) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -155,11 +159,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", 123) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(3, fields.length); @@ -183,11 +188,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "123") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ 
-206,11 +212,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper2.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "123") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("passed as String")); } @@ -225,11 +232,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "a") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("For input string: \"a\"")); @@ -240,11 +248,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "a") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); @@ -264,11 +273,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("field") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); mapping = XContentFactory.jsonBuilder().startObject() @@ -285,11 +295,12 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .nullField("field") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); IndexableField pointField = fields[0]; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java index e1fdf103564..dd664219867 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.document.Document; import 
org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -143,6 +144,15 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { assertNull(ft.stats(reader)); } Document doc = new Document(); + doc.add(new StoredField("scaled_float", -1)); + w.addDocument(doc); + try (DirectoryReader reader = DirectoryReader.open(w)) { + // field exists, but has no point values + FieldStats stats = ft.stats(reader); + assertFalse(stats.hasMinMax()); + assertNull(stats.getMinValue()); + assertNull(stats.getMaxValue()); + } LongPoint point = new LongPoint("scaled_float", -1); doc.add(point); w.addDocument(doc); @@ -152,7 +162,7 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { FieldStats stats = ft.stats(reader); assertEquals(-1/ft.getScalingFactor(), stats.getMinValue()); assertEquals(10/ft.getScalingFactor(), stats.getMaxValue()); - assertEquals(2, stats.getMaxDoc()); + assertEquals(3, stats.getMaxDoc()); } w.deleteAll(); try (DirectoryReader reader = DirectoryReader.open(w)) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 5803d7d957e..83594d98257 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -51,16 +51,18 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() .field("field", "value") - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON)); documentMapper = parser.parse("type", new CompressedXContent(mapping)); - doc = documentMapper.parse("test", "type", "1", XContentFactory.smileBuilder().startObject() + doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.smileBuilder().startObject() .field("field", "value") - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.SMILE)); } @@ -72,10 +74,11 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() .startObject("path2").field("field2", "value2").endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); IndexableField sourceField = doc.rootDoc().getField("_source"); Map 
sourceAsMap; @@ -93,10 +96,11 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() .startObject("path2").field("field2", "value2").endObject() - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); IndexableField sourceField = doc.rootDoc().getField("_source"); Map sourceAsMap; @@ -276,7 +280,7 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { - documentMapper.parse("test", "type", "1", new BytesArray("{}}")); // extra end object (invalid JSON) + documentMapper.parse(SourceToParse.source("test", "type", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object (invalid JSON) fail("Expected parse exception"); } catch (MapperParsingException e) { assertNotNull(e.getRootCause()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 47b062a42df..438ccd5fa86 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; @@ -63,7 +64,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field1", 1) .field("field2", 1) @@ -76,7 +77,8 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { .field("field9", "2016-04-05") .field("field10", true) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); writer.addDocument(doc.rootDoc()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 4df4361db6a..e2dc7bc7a09 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -77,11 +78,12 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -108,11 +110,12 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -128,11 +131,12 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); @@ -150,11 +154,12 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -180,7 +185,8 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { for (String option : supportedOptions.keySet()) { jsonDoc.field(option, "1234"); } - ParsedDocument doc = mapper.parse("test", "type", "1", jsonDoc.endObject().bytes()); + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", jsonDoc.endObject().bytes(), + XContentType.JSON)); for (Map.Entry entry : supportedOptions.entrySet()) { String field = entry.getKey(); @@ -201,11 +207,12 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .array("field", new String[] {"a", "b"}) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -240,11 +247,12 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); - 
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .array("field", new String[] {"a", "b"}) .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -396,7 +404,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("field1", "1234") .field("field2", "1234") @@ -405,7 +413,8 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { .field("field5", "1234") .field("field6", "1234") .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false)); assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false)); diff --git a/core/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java index 3d4903e0472..0db1709e92c 100644 --- a/core/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java @@ -36,6 +36,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; +import java.util.function.Supplier; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -93,26 +95,64 @@ public class FsProbeTests extends ESTestCase { } public void testFsInfoOverflow() throws Exception { - FsInfo.Path pathStats = new FsInfo.Path("/foo/bar", null, - randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); + final FsInfo.Path pathStats = + new FsInfo.Path( + "/foo/bar", + null, + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong()); - // While not overflowing, keep adding - FsInfo.Path pathToAdd = new FsInfo.Path("/foo/baz", null, - randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); - while ((pathStats.total + pathToAdd.total) > 0) { - // Add itself as a path, to increase the total bytes until it overflows - logger.info("--> adding {} bytes to {}, will be: {}", pathToAdd.total, pathStats.total, pathToAdd.total + pathStats.total); - pathStats.add(pathToAdd); - pathToAdd = new FsInfo.Path("/foo/baz", null, - randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); - } + addUntilOverflow( + pathStats, + p -> p.total, + "total", + () -> new FsInfo.Path("/foo/baz", null, randomNonNegativeLong(), 0, 0)); - logger.info("--> adding {} bytes to {}, will be: {}", pathToAdd.total, pathStats.total, pathToAdd.total + pathStats.total); - assertThat(pathStats.total + pathToAdd.total, lessThan(0L)); - pathStats.add(pathToAdd); + addUntilOverflow( + pathStats, + p -> p.free, + "free", + () -> new FsInfo.Path("/foo/baz", null, 0, randomNonNegativeLong(), 0)); - // Even after overflowing, it should not be negative + addUntilOverflow( + pathStats, + p -> p.available, + "available", + () -> new FsInfo.Path("/foo/baz", null, 0, 0, 
randomNonNegativeLong())); + + // even after overflowing these should not be negative assertThat(pathStats.total, greaterThan(0L)); + assertThat(pathStats.free, greaterThan(0L)); + assertThat(pathStats.available, greaterThan(0L)); + } + + private void addUntilOverflow( + final FsInfo.Path pathStats, + final Function<FsInfo.Path, Long> getter, + final String field, + final Supplier<FsInfo.Path> supplier) { + FsInfo.Path pathToAdd = supplier.get(); + while ((getter.apply(pathStats) + getter.apply(pathToAdd)) > 0) { + // add a path to increase the total bytes until it overflows + logger.info( + "--> adding {} bytes to {}, {} will be: {}", + getter.apply(pathToAdd), + getter.apply(pathStats), + field, + getter.apply(pathStats) + getter.apply(pathToAdd)); + pathStats.add(pathToAdd); + pathToAdd = supplier.get(); + } + // this overflows + logger.info( + "--> adding {} bytes to {}, {} will be: {}", + getter.apply(pathToAdd), + getter.apply(pathStats), + field, + getter.apply(pathStats) + getter.apply(pathToAdd)); + assertThat(getter.apply(pathStats) + getter.apply(pathToAdd), lessThan(0L)); + pathStats.add(pathToAdd); } public void testIoStats() { diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheActionTests.java new file mode 100644 index 00000000000..25a8f350d9a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheActionTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.rest.action.admin.indices; + +import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; + +import java.util.HashMap; + +import static org.hamcrest.Matchers.equalTo; + +public class RestClearIndicesCacheActionTests extends ESTestCase { + + public void testRequestCacheSet() throws Exception { + final HashMap<String, String> params = new HashMap<>(); + params.put("request", "true"); + final RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()) + .withParams(params).build(); + ClearIndicesCacheRequest cacheRequest = new ClearIndicesCacheRequest(); + cacheRequest = RestClearIndicesCacheAction.fromRequest(restRequest, cacheRequest); + assertThat(cacheRequest.requestCache(), equalTo(true)); + } +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 65f2965df97..363e972456e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -34,13 +34,20 @@ import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener; import org.elasticsearch.index.cache.query.DisabledQueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ObjectMapper; +import org.elasticsearch.index.mapper.ObjectMapper.Nested; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; @@ -59,6 +66,7 @@ import java.util.Collections; import java.util.List; import static org.mockito.Matchers.anyObject; +import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -69,6 +77,7 @@ import static org.mockito.Mockito.when; * {@link AggregationBuilder} instance.
*/ public abstract class AggregatorTestCase extends ESTestCase { + private static final String NESTEDFIELD_PREFIX = "nested_"; private List<Releasable> releasables = new ArrayList<>(); protected <A extends Aggregator, B extends AggregationBuilder> A createAggregator(B aggregationBuilder, @@ -119,6 +128,15 @@ public abstract class AggregatorTestCase extends ESTestCase { when(searchContext.searcher()).thenReturn(contextIndexSearcher); when(searchContext.fetchPhase()) .thenReturn(new FetchPhase(Arrays.asList(new FetchSourceSubPhase(), new DocValueFieldsFetchSubPhase()))); + when(searchContext.getObjectMapper(anyString())).thenAnswer(invocation -> { + String fieldName = (String) invocation.getArguments()[0]; + if (fieldName.startsWith(NESTEDFIELD_PREFIX)) { + BuilderContext context = new BuilderContext(indexSettings.getSettings(), new ContentPath()); + return new ObjectMapper.Builder<>(fieldName).nested(Nested.newNested(false, false)).build(context); + } + return null; + }); + when(searchContext.bitsetFilterCache()).thenReturn(new BitsetFilterCache(indexSettings, mock(Listener.class))); doAnswer(invocation -> { /* Store the releasables so we can release them at the end of the test case. This is important because aggregations don't * close their sub-aggregations. This is fairly similar to what the production code does. */ @@ -157,6 +175,8 @@ public abstract class AggregatorTestCase extends ESTestCase { when(queryShardContext.getForField(fieldType)).then(invocation -> fieldType.fielddataBuilder().build(indexSettings, fieldType, new IndexFieldDataCache.None(), circuitBreakerService, mock(MapperService.class))); } + NestedScope nestedScope = new NestedScope(); + when(queryShardContext.nestedScope()).thenReturn(nestedScope); return queryShardContext; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java index 02cd88f16fa..975c1a0d466 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java @@ -46,7 +46,7 @@ public class BestBucketsDeferringCollectorTests extends AggregatorTestCase { public void testReplay() throws Exception { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); - int numDocs = randomInt(128); + int numDocs = randomIntBetween(1, 128); int maxNumValues = randomInt(16); for (int i = 0; i < numDocs; i++) { Document document = new Document(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java new file mode 100644 index 00000000000..5e260b0cf15 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -0,0 +1,145 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.nested; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.store.Directory; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.TypeFieldMapper; +import org.elasticsearch.index.mapper.UidFieldMapper; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.metrics.max.InternalMax; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class ReverseNestedAggregatorTests extends AggregatorTestCase { + + private static final String VALUE_FIELD_NAME = "number"; + private static final String NESTED_OBJECT = "nested_object"; + private static final String NESTED_AGG = "nestedAgg"; + private static final String REVERSE_AGG_NAME = "reverseNestedAgg"; + private static final String MAX_AGG_NAME = "maxAgg"; + + + public void testNoDocs() throws IOException { + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + // intentionally not writing any docs + } + try (IndexReader indexReader = DirectoryReader.open(directory)) { + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, + NESTED_OBJECT); + ReverseNestedAggregationBuilder reverseNestedBuilder + = new ReverseNestedAggregationBuilder(REVERSE_AGG_NAME); + nestedBuilder.subAggregation(reverseNestedBuilder); + MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME) + .field(VALUE_FIELD_NAME); + reverseNestedBuilder.subAggregation(maxAgg); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.LONG); + fieldType.setName(VALUE_FIELD_NAME); + + Nested nested = search(newSearcher(indexReader, true, true), + new MatchAllDocsQuery(), nestedBuilder, fieldType); + ReverseNested reverseNested = (ReverseNested) nested.getProperty(REVERSE_AGG_NAME); + assertEquals(REVERSE_AGG_NAME, reverseNested.getName()); + assertEquals(0, reverseNested.getDocCount()); + + InternalMax max = (InternalMax) reverseNested.getProperty(MAX_AGG_NAME); + assertEquals(MAX_AGG_NAME, max.getName()); + assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), Double.MIN_VALUE); + } + } + } + + public void testMaxFromParentDocs() throws IOException { + int numParentDocs = randomIntBetween(1, 20); + int expectedParentDocs = 0; + int expectedNestedDocs = 0; + double expectedMaxValue = Double.NEGATIVE_INFINITY; + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + for (int i = 0; i < numParentDocs; 
i++) { + List<Document> documents = new ArrayList<>(); + int numNestedDocs = randomIntBetween(0, 20); + for (int nested = 0; nested < numNestedDocs; nested++) { + Document document = new Document(); + document.add(new Field(UidFieldMapper.NAME, "type#" + i, + UidFieldMapper.Defaults.NESTED_FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "__" + NESTED_OBJECT, + TypeFieldMapper.Defaults.FIELD_TYPE)); + documents.add(document); + expectedNestedDocs++; + } + Document document = new Document(); + document.add(new Field(UidFieldMapper.NAME, "type#" + i, + UidFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "test", + TypeFieldMapper.Defaults.FIELD_TYPE)); + long value = randomNonNegativeLong() % 10000; + document.add(new SortedNumericDocValuesField(VALUE_FIELD_NAME, value)); + if (numNestedDocs > 0) { + expectedMaxValue = Math.max(expectedMaxValue, value); + expectedParentDocs++; + } + documents.add(document); + iw.addDocuments(documents); + } + iw.commit(); + } + try (IndexReader indexReader = DirectoryReader.open(directory)) { + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, + NESTED_OBJECT); + ReverseNestedAggregationBuilder reverseNestedBuilder + = new ReverseNestedAggregationBuilder(REVERSE_AGG_NAME); + nestedBuilder.subAggregation(reverseNestedBuilder); + MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME) + .field(VALUE_FIELD_NAME); + reverseNestedBuilder.subAggregation(maxAgg); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.LONG); + fieldType.setName(VALUE_FIELD_NAME); + + Nested nested = search(newSearcher(indexReader, true, true), + new MatchAllDocsQuery(), nestedBuilder, fieldType); + assertEquals(expectedNestedDocs, nested.getDocCount()); + + ReverseNested reverseNested = (ReverseNested) nested.getProperty(REVERSE_AGG_NAME); + assertEquals(REVERSE_AGG_NAME, reverseNested.getName()); + assertEquals(expectedParentDocs, reverseNested.getDocCount()); + + InternalMax max = (InternalMax) reverseNested.getProperty(MAX_AGG_NAME); + assertEquals(MAX_AGG_NAME, max.getName()); + assertEquals(expectedMaxValue, max.getValue(), Double.MIN_VALUE); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 7db99ff3232..11496309d47 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -751,52 +751,69 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over")); } - public void testFastVectorHighlighterWithSentenceBoundaryScanner() throws Exception { + public void testHighlighterWithSentenceBoundaryScanner() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); indexRandom(true, client().prepareIndex("test", "type1") .setSource("field1", "A sentence with few words.
Another sentence with even more words.")); - logger.info("--> highlighting and searching on 'field' with sentence boundary_scanner"); - SearchSourceBuilder source = searchSource() + for (String type : new String[] {"unified", "fvh"}) { + logger.info("--> highlighting and searching on 'field' with sentence boundary_scanner"); + SearchSourceBuilder source = searchSource() .query(termQuery("field1", "sentence")) .highlighter(highlight() - .field("field1", 20, 2) - .order("score") - .preTags("<xxx>").postTags("</xxx>") - .boundaryScannerType(BoundaryScannerType.SENTENCE)); + .field("field1", 21, 2) + .highlighterType(type) + .preTags("<xxx>").postTags("</xxx>") + .boundaryScannerType(BoundaryScannerType.SENTENCE)); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHighlight(searchResponse, 0, "field1", 0, 2, anyOf( + equalTo("A <xxx>sentence</xxx> with few words"), + equalTo("A <xxx>sentence</xxx> with few words. ") + )); - assertHighlight(searchResponse, 0, "field1", 0, 2, equalTo("A <xxx>sentence</xxx> with few words. ")); - assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo("Another <xxx>sentence</xxx> with even more words. ")); + assertHighlight(searchResponse, 0, "field1", 1, 2, anyOf( + equalTo("Another <xxx>sentence</xxx> with"), + equalTo("Another <xxx>sentence</xxx> with even more words. ") + )); + } } - public void testFastVectorHighlighterWithSentenceBoundaryScannerAndLocale() throws Exception { + public void testHighlighterWithSentenceBoundaryScannerAndLocale() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); indexRandom(true, client().prepareIndex("test", "type1") .setSource("field1", "A sentence with few words. Another sentence with even more words.")); - logger.info("--> highlighting and searching on 'field' with sentence boundary_scanner"); - SearchSourceBuilder source = searchSource() + for (String type : new String[] {"fvh", "unified"}) { + logger.info("--> highlighting and searching on 'field' with sentence boundary_scanner"); + SearchSourceBuilder source = searchSource() .query(termQuery("field1", "sentence")) .highlighter(highlight() - .field("field1", 20, 2) - .order("score") - .preTags("<xxx>").postTags("</xxx>") - .boundaryScannerType(BoundaryScannerType.SENTENCE) - .boundaryScannerLocale(Locale.ENGLISH.toLanguageTag())); + .field("field1", 21, 2) + .highlighterType(type) + .preTags("<xxx>").postTags("</xxx>") + .boundaryScannerType(BoundaryScannerType.SENTENCE) + .boundaryScannerLocale(Locale.ENGLISH.toLanguageTag())); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 2, equalTo("A <xxx>sentence</xxx> with few words. ")); - assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo("Another <xxx>sentence</xxx> with even more words. ")); + assertHighlight(searchResponse, 0, "field1", 0, 2, anyOf( + equalTo("A <xxx>sentence</xxx> with few words"), + equalTo("A <xxx>sentence</xxx> with few words. ") + )); + + assertHighlight(searchResponse, 0, "field1", 1, 2, anyOf( + equalTo("Another <xxx>sentence</xxx> with"), + equalTo("Another <xxx>sentence</xxx> with even more words. 
") + )); + } } - public void testFastVectorHighlighterWithWordBoundaryScanner() throws Exception { + public void testHighlighterWithWordBoundaryScanner() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); @@ -804,39 +821,48 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog")); logger.info("--> highlighting and searching on 'field' with word boundary_scanner"); - SearchSourceBuilder source = searchSource() - .query(termQuery("field1", "some")) - .highlighter(highlight() - .field("field1", 23, 1) - .order("score") - .preTags("").postTags("") - .boundaryScannerType(BoundaryScannerType.WORD)); + for (String type : new String[] {"unified", "fvh"}) { + SearchSourceBuilder source = searchSource() + .query(termQuery("field1", "some")) + .highlighter(highlight() + .field("field1", 23, 1) + .highlighterType(type) + .preTags("").postTags("") + .boundaryScannerType(BoundaryScannerType.WORD)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("some quick and hairy brown")); + assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf( + equalTo("some quick and hairy brown"), + equalTo("some") + )); + } } - public void testFastVectorHighlighterWithWordBoundaryScannerAndLocale() throws Exception { + public void testHighlighterWithWordBoundaryScannerAndLocale() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); indexRandom(true, client().prepareIndex("test", "type1") .setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog")); - logger.info("--> highlighting and searching on 'field' with word boundary_scanner"); - SearchSourceBuilder source = searchSource() + for (String type : new String[] {"unified", "fvh"}) { + SearchSourceBuilder source = searchSource() .query(termQuery("field1", "some")) .highlighter(highlight() - .field("field1", 23, 1) - .order("score") - .preTags("").postTags("") - .boundaryScannerType(BoundaryScannerType.WORD) - .boundaryScannerLocale(Locale.ENGLISH.toLanguageTag())); + .field("field1", 23, 1) + .highlighterType(type) + .preTags("").postTags("") + .boundaryScannerType(BoundaryScannerType.WORD) + .boundaryScannerLocale(Locale.ENGLISH.toLanguageTag())); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("some quick and hairy brown")); + assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf( + equalTo("some quick and hairy brown"), + equalTo("some") + )); + } } /** @@ -1841,16 +1867,16 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + // Unified hl also works but the fragment is longer than the plain highlighter because of the boundary is the word + field.highlighterType("unified"); + response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); + assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + // Postings hl 
also works but the fragment is the whole first sentence (size ignored) field.highlighterType("postings"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // Unified hl also works but the fragment is the whole first sentence (size ignored) - field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // We can also ask for a fragment longer than the input string and get the whole string field.highlighterType("plain").noMatchSize(text.length() * 2); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); @@ -1860,16 +1886,15 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + field.highlighterType("unified"); + response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); + assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + //no difference using postings hl as the noMatchSize is ignored (just needs to be greater than 0) field.highlighterType("postings"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - //no difference using unified hl as the noMatchSize is ignored (just needs to be greater than 0) - field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // We can also ask for a fragment exactly the size of the input field and get the whole field field.highlighterType("plain").noMatchSize(text.length()); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); @@ -1879,16 +1904,16 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + // unified hl returns the first sentence as the noMatchSize does not cross sentence boundary. 
+ field.highlighterType("unified"); + response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); + assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + //no difference using postings hl as the noMatchSize is ignored (just needs to be greater than 0) field.highlighterType("postings"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - //no difference using unified hl as the noMatchSize is ignored (just needs to be greater than 0) - field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // You can set noMatchSize globally in the highlighter as well field.highlighterType("plain").noMatchSize(null); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); @@ -1898,12 +1923,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - field.highlighterType("unified"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); + assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + + field.highlighterType("postings"); + response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); // We don't break if noMatchSize is less than zero though @@ -1947,16 +1972,15 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + field.highlighterType("unified"); + response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); + assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + // Postings hl also works but the fragment is the whole first sentence (size ignored) field.highlighterType("postings"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // Unified hl also works but the fragment is the whole first sentence (size ignored) - field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // And noMatchSize returns nothing when the first entry is empty string! 
index("test", "type1", "2", "text", new String[] {"", text2}); refresh(); @@ -1980,11 +2004,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); + // except for the unified highlighter which starts from the first string with actual content field.highlighterType("unified"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertHighlight(response, 0, "text", 0, 1, equalTo("I am short")); // But if the field was actually empty then you should get no highlighting field index("test", "type1", "3", "text", new String[] {}); @@ -2031,7 +2056,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); - field.highlighterType("fvh"); + field.highlighterType("unified"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .highlighter(new HighlightBuilder().field(field)).get(); @@ -2081,16 +2106,16 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence")); + field.highlighterType("unified"); + response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); + assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence")); + + // Postings hl also works but the fragment is the whole first sentence (size ignored) field.highlighterType("postings"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence.")); - // Unified hl also works but the fragment is the whole first sentence (size ignored) - field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence.")); - //if there's a match we only return the values with matches (whole value as number_of_fragments == 0) MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery("text", "third fifth"); field.highlighterType("plain"); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 07502ff3383..5bd2bad31d1 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -68,12 +68,10 @@ import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore; import static 
org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -116,6 +114,36 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { assertSuggestions("foo", prefix, "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); } + /** + * test that suggestion works if prefix is either provided via {@link CompletionSuggestionBuilder#text(String)} or + * {@link SuggestBuilder#setGlobalText(String)} + */ + public void testTextAndGlobalText() throws Exception { + final CompletionMappingBuilder mapping = new CompletionMappingBuilder(); + createIndexAndMapping(mapping); + int numDocs = 10; + List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>(); + for (int i = 1; i <= numDocs; i++) { + indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(jsonBuilder().startObject().startObject(FIELD) + .field("input", "suggestion" + i).field("weight", i).endObject().endObject())); + } + indexRandom(true, indexRequestBuilders); + CompletionSuggestionBuilder noText = SuggestBuilders.completionSuggestion(FIELD); + SearchResponse searchResponse = client().prepareSearch(INDEX) + .suggest(new SuggestBuilder().addSuggestion("foo", noText).setGlobalText("sugg")).execute().actionGet(); + assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); + + CompletionSuggestionBuilder withText = SuggestBuilders.completionSuggestion(FIELD).text("sugg"); + searchResponse = client().prepareSearch(INDEX) + .suggest(new SuggestBuilder().addSuggestion("foo", withText)).execute().actionGet(); + assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); + + // test that suggestion text takes precedence over global text + searchResponse = client().prepareSearch(INDEX) + .suggest(new SuggestBuilder().addSuggestion("foo", withText).setGlobalText("bogus")).execute().actionGet(); + assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); + } + public void testRegex() throws Exception { final CompletionMappingBuilder mapping = new CompletionMappingBuilder(); createIndexAndMapping(mapping); @@ -217,7 +245,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { for (CompletionSuggestion.Entry.Option option : options) { assertThat(option.getText().toString(), equalTo("suggestion" + id)); assertSearchHit(option.getHit(), hasId("" + id)); - assertSearchHit(option.getHit(), hasScore(((float) id))); + assertSearchHit(option.getHit(), hasScore((id))); assertNotNull(option.getHit().getSourceAsMap()); id--; } @@ -252,7 +280,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { for (CompletionSuggestion.Entry.Option option : options) { assertThat(option.getText().toString(), equalTo("suggestion" + id)); assertSearchHit(option.getHit(), hasId("" + id)); - assertSearchHit(option.getHit(), hasScore(((float) id))); + assertSearchHit(option.getHit(), hasScore((id))); assertNull(option.getHit().getSourceAsMap()); id--; } @@ -289,7 +317,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { for (CompletionSuggestion.Entry.Option option : options) { assertThat(option.getText().toString(), equalTo("suggestion" + id)); assertSearchHit(option.getHit(), hasId("" + id)); - assertSearchHit(option.getHit(), hasScore(((float) id)));
+ assertSearchHit(option.getHit(), hasScore((id))); assertNotNull(option.getHit().getSourceAsMap()); Set<String> sourceFields = option.getHit().getSourceAsMap().keySet(); assertThat(sourceFields, contains("a")); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 7c864320abe..15c05a56226 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -26,12 +26,14 @@ import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextBuilder; @@ -62,7 +64,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -79,7 +81,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 7); } @@ -100,7 +103,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -112,7 +115,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } @@ -133,7 +137,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { DocumentMapper
defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() .startObject() .startObject("completion") .array("input", "suggestion5", "suggestion6", "suggestion7") @@ -143,7 +147,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .field("weight", 5) .endObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } @@ -181,7 +186,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", builder.bytes()); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index f4c65b24780..a0a278c831d 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -23,11 +23,13 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.suggest.completion.context.ContextBuilder; import org.elasticsearch.search.suggest.completion.context.ContextMapping; @@ -62,7 +64,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -79,7 +81,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 
7); } @@ -101,7 +104,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -116,7 +119,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } @@ -137,7 +141,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", jsonBuilder() + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() .startObject() .startObject("completion") .array("input", "suggestion5", "suggestion6", "suggestion7") @@ -156,7 +160,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .field("weight", 5) .endObject() .endObject() - .bytes()); + .bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } @@ -194,7 +199,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", builder.bytes()); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index b552daafb09..77a0514a140 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -845,6 +845,11 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet(); assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(restoreSnapshotResponse.getRestoreInfo().totalShards())); + // we have to delete the index here manually, otherwise the cluster will keep + // trying to allocate the shards for the index, even though the restore operation + // is completed and marked as failed, which can lead to nodes having pending + // cluster states 
to process in their queue when the test is finished + client.admin().indices().prepareDelete("test-idx").get(); } public void testDeletionOfFailingToRecoverIndexShouldStopRestore() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index eb5c00b76d9..65a54feabbe 100644 --- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -39,11 +39,14 @@ import org.joda.time.format.ISODateTimeFormat; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.List; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -252,6 +255,37 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { containsString("field:huge field:pidgin"), true); } + public void testExplainWithRewriteValidateQueryAllShards() throws Exception { + client().admin().indices().prepareCreate("test") + .addMapping("type1", "field", "type=text,analyzer=whitespace") + .setSettings(SETTING_NUMBER_OF_SHARDS, 2).get(); + // We are relying on specific routing behaviors for the result to be right, so + // we cannot randomize the number of shards or change ids here. + client().prepareIndex("test", "type1", "1") + .setSource("field", "quick lazy huge brown pidgin").get(); + client().prepareIndex("test", "type1", "2") + .setSource("field", "the quick brown fox").get(); + client().prepareIndex("test", "type1", "3") + .setSource("field", "the quick lazy huge brown fox jumps over the tree").get(); + client().prepareIndex("test", "type1", "4") + .setSource("field", "the lazy dog quacks like a duck").get(); + refresh(); + + // prefix queries + assertExplanations(QueryBuilders.matchPhrasePrefixQuery("field", "qu"), + Arrays.asList( + equalTo("field:quick"), + allOf(containsString("field:quick"), containsString("field:quacks")) + ), true, true); + assertExplanations(QueryBuilders.matchPhrasePrefixQuery("field", "ju"), + Arrays.asList( + equalTo("field:jumps"), + equalTo("+MatchNoDocsQuery(\"empty MultiPhraseQuery\") +MatchNoDocsQuery(\"No " + + "terms supplied for org.elasticsearch.common.lucene.search." 
+ + "MultiPhrasePrefixQuery\")") + ), true, true); + } + public void testIrrelevantPropertiesBeforeQuery() throws IOException { createIndex("test"); ensureGreen(); @@ -280,4 +314,22 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { assertThat(response.getQueryExplanation().get(0).getExplanation(), matcher); assertThat(response.isValid(), equalTo(true)); } + + private static void assertExplanations(QueryBuilder queryBuilder, + List> matchers, boolean withRewrite, + boolean allShards) { + ValidateQueryResponse response = client().admin().indices().prepareValidateQuery("test") + .setTypes("type1") + .setQuery(queryBuilder) + .setExplain(true) + .setRewrite(withRewrite) + .setAllShards(allShards) + .execute().actionGet(); + assertThat(response.getQueryExplanation().size(), equalTo(matchers.size())); + for (int i = 0; i < matchers.size(); i++) { + assertThat(response.getQueryExplanation().get(i).getError(), nullValue()); + assertThat(response.getQueryExplanation().get(i).getExplanation(), matchers.get(i)); + assertThat(response.isValid(), equalTo(true)); + } + } } diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index c35e6a0075a..bfba16efe42 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -44,6 +44,7 @@ import argparse import tempfile import os +from os.path import basename, dirname, isdir, join import signal import shutil import urllib @@ -57,26 +58,14 @@ from urllib.parse import urlparse from http.client import HTTPConnection -DEFAULT_PLUGINS = ["analysis-icu", - "analysis-kuromoji", - "analysis-phonetic", - "analysis-smartcn", - "analysis-stempel", - "discovery-azure-classic", - "discovery-ec2", - "discovery-file", - "discovery-gce", - "ingest-attachment", - "ingest-geoip", - "ingest-user-agent", - "mapper-attachments", - "mapper-murmur3", - "mapper-size", - "repository-azure", - "repository-gcs", - "repository-hdfs", - "repository-s3", - "store-smb"] +def find_official_plugins(): + plugins_dir = join(dirname(dirname(__file__)), 'plugins') + plugins = [] + for plugin in os.listdir(plugins_dir): + if isdir(join(plugins_dir, plugin)): + plugins.append(plugin) + return plugins +DEFAULT_PLUGINS = find_official_plugins() try: JAVA_HOME = os.environ['JAVA_HOME'] diff --git a/distribution/src/main/resources/bin/elasticsearch.in.bat b/distribution/src/main/resources/bin/elasticsearch.in.bat index 98b6a16316c..a2500833872 100644 Binary files a/distribution/src/main/resources/bin/elasticsearch.in.bat and b/distribution/src/main/resources/bin/elasticsearch.in.bat differ diff --git a/distribution/src/main/resources/bin/elasticsearch.in.sh b/distribution/src/main/resources/bin/elasticsearch.in.sh index 58b26a2d6eb..2d224392256 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.sh +++ b/distribution/src/main/resources/bin/elasticsearch.in.sh @@ -10,4 +10,4 @@ EOF exit 1 fi -ES_CLASSPATH="$ES_HOME/lib/elasticsearch-${project.version}.jar:$ES_HOME/lib/*" +ES_CLASSPATH="$ES_HOME/lib/*" diff --git a/docs/build.gradle b/docs/build.gradle index 1cb86472f70..f8e5ff0dc25 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -73,15 +73,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc', 'reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc', 'reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc', 
- 'reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/stop-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc', 'reference/cat/snapshots.asciidoc', 'reference/cat/templates.asciidoc', 'reference/cat/thread_pool.asciidoc', @@ -145,6 +136,8 @@ integTestCluster { configFile 'scripts/my_map_script.painless' configFile 'scripts/my_combine_script.painless' configFile 'scripts/my_reduce_script.painless' + configFile 'analysis/synonym.txt' + configFile 'analysis/stemmer_override.txt' configFile 'userdict_ja.txt' configFile 'KeywordTokenizer.rbbi' // Whitelist reindexing from the local node so we can test it. diff --git a/docs/java-api/admin/indices/put-mapping.asciidoc b/docs/java-api/admin/indices/put-mapping.asciidoc index 9b085975077..6c2a5406528 100644 --- a/docs/java-api/admin/indices/put-mapping.asciidoc +++ b/docs/java-api/admin/indices/put-mapping.asciidoc @@ -60,7 +60,7 @@ You can use the same API to update an existing mapping: [source,java] -------------------------------------------------- client.admin().indices().preparePutMapping("twitter") <1> - .setType("tweet") <2> + .setType("user") <2> .setSource("{\n" + <3> " \"properties\": {\n" + " \"user_name\": {\n" + diff --git a/docs/java-api/docs/update-by-query.asciidoc b/docs/java-api/docs/update-by-query.asciidoc index a94899668ef..256ed326794 100644 --- a/docs/java-api/docs/update-by-query.asciidoc +++ b/docs/java-api/docs/update-by-query.asciidoc @@ -1,8 +1,6 @@ [[docs-update-by-query]] == Update By Query API -experimental[The update-by-query API is new and should still be considered experimental. The API may change in ways that are not backwards compatible] - The simplest usage of `updateByQuery` updates each document in an index without changing the source. 
This usage enables <> or another online diff --git a/docs/java-rest/high-level/document/delete.asciidoc b/docs/java-rest/high-level/document/delete.asciidoc index 36cfc54994e..e9ba8b19408 100644 --- a/docs/java-rest/high-level/document/delete.asciidoc +++ b/docs/java-rest/high-level/document/delete.asciidoc @@ -6,19 +6,20 @@ The most simple Delete Request needs is: -["source","java",subs="attributes,callouts"] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -sys2::[perl -ne 'exit if /end::delete-request/; print if $tag; $tag = $tag || /tag::delete-request/' {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java] +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-request] -------------------------------------------------- <1> Index name <2> Type <3> Document id + You can also provide the following properties: -["source","java",subs="attributes,callouts"] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -sys2::[perl -ne 'exit if /end::delete-request-props/; print if $tag; $tag = $tag || /tag::delete-request-props/' {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java] +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-request-props] -------------------------------------------------- <1> Timeout <2> Timeout as String @@ -30,17 +31,17 @@ sys2::[perl -ne 'exit if /end::delete-request-props/; print if $tag; $tag = $tag [[java-rest-high-document-delete-sync]] ==== Execution -["source","java",subs="attributes,callouts"] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -sys2::[perl -ne 'exit if /end::delete-execute/; print if $tag; $tag = $tag || /tag::delete-execute/' {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java] +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-execute] -------------------------------------------------- [[java-rest-high-document-delete-async]] ==== Asynchronous Execution -["source","java",subs="attributes,callouts"] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -sys2::[perl -ne 'exit if /end::delete-execute-async/; print if $tag; $tag = $tag || /tag::delete-execute-async/' {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java] +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-execute-async] -------------------------------------------------- <1> Implement if needed when execution did not throw an exception <2> Implement if needed in case of failure @@ -50,18 +51,17 @@ sys2::[perl -ne 'exit if /end::delete-execute-async/; print if $tag; $tag = $tag In the Delete Response object, you can check for example the result of the operation: -["source","java",subs="attributes,callouts"] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -sys2::[perl -ne 'exit if /end::delete-notfound/; print if $tag; $tag = $tag || /tag::delete-notfound/' {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java] +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-notfound] -------------------------------------------------- <1> Do something if we did not 
find the document which should have been deleted Note that if you have a version conflict because you defined the version within the <>, it will raise an `ElasticsearchException` like: -["source","java",subs="attributes,callouts"] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -sys2::[perl -ne 'exit if /end::delete-conflict/; print if $tag; $tag = $tag || /tag::delete-conflict/' {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java] +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-conflict] -------------------------------------------------- <1> We got a version conflict - diff --git a/docs/java-rest/high-level/document/index.asciidoc b/docs/java-rest/high-level/document/index.asciidoc index 15eee483baa..32815794a1b 100644 --- a/docs/java-rest/high-level/document/index.asciidoc +++ b/docs/java-rest/high-level/document/index.asciidoc @@ -1 +1,5 @@ +:doc-tests: {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation + include::delete.asciidoc[] + +:doc-tests!: diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 3234b6ae226..e57a452ba52 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -20,7 +20,7 @@ sudo bin/elasticsearch-plugin -h .Running as root ===================== If Elasticsearch was installed using the deb or rpm package then run -`/usr/share/elasticsearch-plugin` as `root` so it can write to the appropriate files on disk. +`/usr/share/elasticsearch/bin/elasticsearch-plugin` as `root` so it can write to the appropriate files on disk. Otherwise run `bin/elasticsearch-plugin` as the user that owns all of the Elasticsearch files. ===================== @@ -80,8 +80,8 @@ to a local Java truststore and pass the location to the script as follows: sudo ES_JAVA_OPTS="-Djavax.net.ssl.trustStore=/path/to/trustStore.jks" bin/elasticsearch-plugin install https://.... ----------------------------------- -[[listing-removing]] -=== Listing and Removing Installed Plugins +[[listing-removing-updating]] +=== Listing, Removing and Updating Installed Plugins [float] === Listing plugins @@ -109,6 +109,18 @@ sudo bin/elasticsearch-plugin remove [pluginname] After a Java plugin has been removed, you will need to restart the node to complete the removal process. +[float] +=== Updating plugins + +Plugins are built for a specific version of Elasticsearch, and therefore must be reinstalled +each time Elasticsearch is updated. 
+ +[source,shell] +----------------------------------- +sudo bin/elasticsearch-plugin remove [pluginname] +sudo bin/elasticsearch-plugin install [pluginname] +----------------------------------- + === Other command line parameters The `plugin` scripts supports a number of other command line parameters: diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index a1f8c6ea81d..e6d8c682ba7 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -179,7 +179,7 @@ Example using Java: [source,java] ---- client.admin().cluster().preparePutRepository("my_backup_java1") - .setType("azure").setSettings(Settings.settingsBuilder() + .setType("azure").setSettings(Settings.builder() .put(Storage.CONTAINER, "backup-container") .put(Storage.CHUNK_SIZE, new ByteSizeValue(32, ByteSizeUnit.MB)) ).get(); diff --git a/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc b/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc index 9dba1f2adf0..b6074298e1c 100644 --- a/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc @@ -22,9 +22,9 @@ the issue documents as nested documents. The mapping could look like: "issue" : { "properties" : { - "tags" : { "type" : "text" } + "tags" : { "type" : "text" }, "comments" : { <1> - "type" : "nested" + "type" : "nested", "properties" : { "username" : { "type" : "keyword" }, "comment" : { "type" : "text" } diff --git a/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc index 177c4195bbf..4dac79b6571 100644 --- a/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc @@ -23,14 +23,14 @@ Read more about http://www.regular-expressions.info/catastrophic.html[pathologic For instance a pattern like : -[source,js] +[source,text] -------------------------------------------------- "(([a-z]+)(\d*))" -------------------------------------------------- when matched against: -[source,js] +[source,text] -------------------------------------------------- "abc123def456" -------------------------------------------------- @@ -74,7 +74,7 @@ PUT test When used to analyze the text -[source,js] +[source,java] -------------------------------------------------- import static org.apache.commons.lang.StringEscapeUtils.escapeHtml -------------------------------------------------- @@ -117,7 +117,7 @@ PUT test When the above analyzer is used on an email address like: -[source,js] +[source,text] -------------------------------------------------- john-smith_123@foo-bar.com -------------------------------------------------- @@ -136,14 +136,14 @@ original token will be highlighted, not just the matching subset. 
For instance, querying the above email address for `"smith"` would highlight: -[source,js] +[source,html] -------------------------------------------------- john-smith_123@foo-bar.com -------------------------------------------------- not: -[source,js] +[source,html] -------------------------------------------------- john-smith_123@foo-bar.com -------------------------------------------------- diff --git a/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc index 6042642027c..93e1eed26b4 100644 --- a/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc @@ -12,8 +12,9 @@ For example: [source,js] -------------------------------------------------- +PUT /my_index { - "index" : { + "settings": { "analysis" : { "analyzer" : { "my_analyzer" : { @@ -31,3 +32,4 @@ For example: } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc index 6e010894f41..33191805fe6 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc @@ -20,15 +20,60 @@ Here is an example: [source,js] -------------------------------------------------- -index : - analysis : - analyzer : - myAnalyzer : - type : custom - tokenizer : standard - filter : [lowercase, custom_stems, porter_stem] - filter: - custom_stems: - type: stemmer_override - rules_path : analysis/custom_stems.txt +PUT /my_index +{ + "settings": { + "analysis" : { + "analyzer" : { + "my_analyzer" : { + "tokenizer" : "standard", + "filter" : ["lowercase", "custom_stems", "porter_stem"] + } + }, + "filter" : { + "custom_stems" : { + "type" : "stemmer_override", + "rules_path" : "analysis/stemmer_override.txt" + } + } + } + } +} -------------------------------------------------- +// CONSOLE + +Where the file looks like: + +[source,stemmer_override] +-------------------------------------------------- +include::{docdir}/../src/test/cluster/config/analysis/stemmer_override.txt[] +-------------------------------------------------- + +You can also define the override rules inline: + +[source,js] +-------------------------------------------------- +PUT /my_index +{ + "settings": { + "analysis" : { + "analyzer" : { + "my_analyzer" : { + "tokenizer" : "standard", + "filter" : ["lowercase", "custom_stems", "porter_stem"] + } + }, + "filter" : { + "custom_stems" : { + "type" : "stemmer_override", + "rules" : [ + "running => run", + "stemmer => stemmer" + ] + } + } + } + } +} +-------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index 548342c521b..a052a4a7a58 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -6,8 +6,9 @@ filters through a single unified interface. For example: [source,js] -------------------------------------------------- +PUT /my_index { - "index" : { + "settings": { "analysis" : { "analyzer" : { "my_analyzer" : { @@ -25,6 +26,7 @@ filters through a single unified interface.
For example: } } -------------------------------------------------- +// CONSOLE The `language`/`name` parameter controls the stemmer with the following available values (the preferred filters are marked in *bold*): @@ -177,4 +179,3 @@ http://clef.isti.cnr.it/2003/WN_web/22.pdf[`light_swedish`] Turkish:: http://snowball.tartarus.org/algorithms/turkish/stemmer.html[*`turkish`*] - diff --git a/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc index f3b5a195662..b20f9c9418d 100644 --- a/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc @@ -47,6 +47,7 @@ PUT /my_index } } ------------------------------------ +// CONSOLE or a predefined language-specific list: @@ -66,6 +67,7 @@ PUT /my_index } } ------------------------------------ +// CONSOLE Elasticsearch provides the following predefined list of languages: diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc index 5ab498802d9..09707fdeb1c 100644 --- a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -3,7 +3,7 @@ experimental[] -The `synonym_graph` token filter allows to easily handle synonyms, +The `synonym_graph` token filter allows to easily handle synonyms, including multi-word synonyms correctly during the analysis process. In order to properly handle multi-word synonyms this token filter @@ -13,8 +13,8 @@ http://blog.mikemccandless.com/2012/04/lucenes-tokenstreams-are-actually.html[Lu ["NOTE",id="synonym-graph-index-note"] =============================== -This token filter is designed to be used as part of a search analyzer -only. If you want to apply synonyms during indexing please use the +This token filter is designed to be used as part of a search analyzer +only. If you want to apply synonyms during indexing please use the standard <>. =============================== @@ -23,30 +23,34 @@ Here is an example: [source,js] -------------------------------------------------- +PUT /test_index { - "index" : { - "analysis" : { - "analyzer" : { - "search_synonyms" : { - "tokenizer" : "whitespace", - "filter" : ["graph_synonyms"] - } - }, - "filter" : { - "graph_synonyms" : { - "type" : "synonym_graph", - "synonyms_path" : "analysis/synonym.txt" + "settings": { + "index" : { + "analysis" : { + "analyzer" : { + "search_synonyms" : { + "tokenizer" : "whitespace", + "filter" : ["graph_synonyms"] + } + }, + "filter" : { + "graph_synonyms" : { + "type" : "synonym_graph", + "synonyms_path" : "analysis/synonym.txt" + } } } } } } -------------------------------------------------- +// CONSOLE The above configures a `search_synonyms` filter, with a path of `analysis/synonym.txt` (relative to the `config` location). The -`search_synonyms` analyzer is then configured with the filter. -Additional settings are: `ignore_case` (defaults to `false`), and +`search_synonyms` analyzer is then configured with the filter. +Additional settings are: `ignore_case` (defaults to `false`), and `expand` (defaults to `true`). The `tokenizer` parameter controls the tokenizers that will be used to @@ -59,39 +63,9 @@ Two synonym formats are supported: Solr, WordNet. 
The following is a sample format of the file: -[source,js] +[source,synonyms] -------------------------------------------------- -# Blank lines and lines starting with pound are comments. - -# Explicit mappings match any token sequence on the LHS of "=>" -# and replace with all alternatives on the RHS. These types of mappings -# ignore the expand parameter in the schema. -# Examples: -i-pod, i pod => ipod, -sea biscuit, sea biscit => seabiscuit - -# Equivalent synonyms may be separated with commas and give -# no explicit mapping. In this case the mapping behavior will -# be taken from the expand parameter in the schema. This allows -# the same synonym file to be used in different synonym handling strategies. -# Examples: -ipod, i-pod, i pod -foozball , foosball -universe , cosmos -lol, laughing out loud - -# If expand==true, "ipod, i-pod, i pod" is equivalent -# to the explicit mapping: -ipod, i-pod, i pod => ipod, i-pod, i pod -# If expand==false, "ipod, i-pod, i pod" is equivalent -# to the explicit mapping: -ipod, i-pod, i pod => ipod - -# Multiple synonym mapping entries are merged. -foo => foo bar -foo => baz -# is equivalent to -foo => foo bar, baz +include::{docdir}/../src/test/cluster/config/analysis/synonym.txt[] -------------------------------------------------- You can also define synonyms for the filter directly in the @@ -99,18 +73,26 @@ configuration file (note use of `synonyms` instead of `synonyms_path`): [source,js] -------------------------------------------------- +PUT /test_index { - "filter" : { - "synonym" : { - "type" : "synonym_graph", - "synonyms" : [ - "lol, laughing out loud", - "universe, cosmos" - ] + "settings": { + "index" : { + "analysis" : { + "filter" : { + "synonym" : { + "type" : "synonym_graph", + "synonyms" : [ + "lol, laughing out loud", + "universe, cosmos" + ] + } + } + } } } } -------------------------------------------------- +// CONSOLE However, it is recommended to define large synonym sets in a file using `synonyms_path`, because specifying them inline increases cluster size unnecessarily. @@ -123,20 +105,28 @@ declared using `format`: [source,js] -------------------------------------------------- +PUT /test_index { - "filter" : { - "synonym" : { - "type" : "synonym_graph", - "format" : "wordnet", - "synonyms" : [ - "s(100000001,1,'abstain',v,1,0).", - "s(100000001,2,'refrain',v,1,0).", - "s(100000001,3,'desist',v,1,0)." - ] + "settings": { + "index" : { + "analysis" : { + "filter" : { + "synonym" : { + "type" : "synonym_graph", + "format" : "wordnet", + "synonyms" : [ + "s(100000001,1,'abstain',v,1,0).", + "s(100000001,2,'refrain',v,1,0).", + "s(100000001,3,'desist',v,1,0)." + ] + } + } + } } } } -------------------------------------------------- +// CONSOLE Using `synonyms_path` to define WordNet synonyms in a file is supported as well.
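For readers cross-checking these docs changes against the Java test changes elsewhere in this patch, the inline-synonyms configuration above can also be expressed through the Java client's settings builder. The sketch below is illustrative only: it assumes an `ESIntegTestCase`-style `client()` like the tests earlier in this diff, the settings keys and the `putArray` list setter follow the 5.x-era `Settings.Builder` API, and the index, analyzer, and filter names are hypothetical, not part of this change.

[source,java]
--------------------------------------------------
// A minimal sketch, assuming an ESIntegTestCase-style client(); the
// "synonym_sketch" index and all analysis component names are hypothetical.
client().admin().indices().prepareCreate("synonym_sketch")
        .setSettings(Settings.builder()
                // custom search analyzer wired to the synonym_graph filter
                .put("index.analysis.analyzer.search_synonyms.tokenizer", "whitespace")
                .putArray("index.analysis.analyzer.search_synonyms.filter", "graph_synonyms")
                // inline rules, mirroring the "synonyms" array in the JSON above
                .put("index.analysis.filter.graph_synonyms.type", "synonym_graph")
                .putArray("index.analysis.filter.graph_synonyms.synonyms",
                        "lol, laughing out loud", "universe, cosmos"))
        .get();
--------------------------------------------------

As with the REST form, large rule sets are better kept in a file referenced by `synonyms_path` than defined inline.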
diff --git a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc index 12da352b51c..c4961d1e5f9 100644 --- a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc @@ -7,25 +7,29 @@ Here is an example: [source,js] -------------------------------------------------- +PUT /test_index { - "index" : { - "analysis" : { - "analyzer" : { - "synonym" : { - "tokenizer" : "whitespace", - "filter" : ["synonym"] - } - }, - "filter" : { - "synonym" : { - "type" : "synonym", - "synonyms_path" : "analysis/synonym.txt" + "settings": { + "index" : { + "analysis" : { + "analyzer" : { + "synonym" : { + "tokenizer" : "whitespace", + "filter" : ["synonym"] + } + }, + "filter" : { + "synonym" : { + "type" : "synonym", + "synonyms_path" : "analysis/synonym.txt" + } } } } } } -------------------------------------------------- +// CONSOLE The above configures a `synonym` filter, with a path of `analysis/synonym.txt` (relative to the `config` location). The @@ -43,38 +47,9 @@ Two synonym formats are supported: Solr, WordNet. The following is a sample format of the file: -[source,js] +[source,synonyms] -------------------------------------------------- -# Blank lines and lines starting with pound are comments. - -# Explicit mappings match any token sequence on the LHS of "=>" -# and replace with all alternatives on the RHS. These types of mappings -# ignore the expand parameter in the schema. -# Examples: -i-pod, i pod => ipod, -sea biscuit, sea biscit => seabiscuit - -# Equivalent synonyms may be separated with commas and give -# no explicit mapping. In this case the mapping behavior will -# be taken from the expand parameter in the schema. This allows -# the same synonym file to be used in different synonym handling strategies. -# Examples: -ipod, i-pod, i pod -foozball , foosball -universe , cosmos - -# If expand==true, "ipod, i-pod, i pod" is equivalent -# to the explicit mapping: -ipod, i-pod, i pod => ipod, i-pod, i pod -# If expand==false, "ipod, i-pod, i pod" is equivalent -# to the explicit mapping: -ipod, i-pod, i pod => ipod - -# Multiple synonym mapping entries are merged. -foo => foo bar -foo => baz -# is equivalent to -foo => foo bar, baz +include::{docdir}/../src/test/cluster/config/analysis/synonym.txt[] -------------------------------------------------- You can also define synonyms for the filter directly in the @@ -82,18 +57,26 @@ configuration file (note use of `synonyms` instead of `synonyms_path`): [source,js] -------------------------------------------------- +PUT /test_index { - "filter" : { - "synonym" : { - "type" : "synonym", - "synonyms" : [ - "i-pod, i pod => ipod", - "universe, cosmos" - ] + "settings": { + "index" : { + "analysis" : { + "filter" : { + "synonym" : { + "type" : "synonym", + "synonyms" : [ + "i-pod, i pod => ipod", + "universe, cosmos" + ] + } + } + } } } } -------------------------------------------------- +// CONSOLE However, it is recommended to define large synonym sets in a file using `synonyms_path`, because specifying them inline increases cluster size unnecessarily. @@ -106,20 +89,28 @@ declared using `format`: [source,js] -------------------------------------------------- +PUT /test_index { - "filter" : { - "synonym" : { - "type" : "synonym", - "format" : "wordnet", - "synonyms" : [ - "s(100000001,1,'abstain',v,1,0).", - "s(100000001,2,'refrain',v,1,0).", - "s(100000001,3,'desist',v,1,0)."
- ] + "settings": { + "index" : { + "analysis" : { + "filter" : { + "synonym" : { + "type" : "synonym", + "format" : "wordnet", + "synonyms" : [ + "s(100000001,1,'abstain',v,1,0).", + "s(100000001,2,'refrain',v,1,0).", + "s(100000001,3,'desist',v,1,0)." + ] + } + } + } } } } -------------------------------------------------- +// CONSOLE Using `synonyms_path` to define WordNet synonyms in a file is supported as well. diff --git a/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc index 01176fa5636..c221075b49f 100644 --- a/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc @@ -75,7 +75,7 @@ Advance settings include: A custom type mapping table, for example (when configured using `type_table_path`): -[source,js] +[source,type_table] -------------------------------------------------- # Map the $, %, '.', and ',' characters to DIGIT # This might be useful for financial data. @@ -94,4 +94,3 @@ NOTE: Using a tokenizer like the `standard` tokenizer may interfere with the `catenate_*` and `preserve_original` parameters, as the original string may already have lost punctuation during tokenization. Instead, you may want to use the `whitespace` tokenizer. - diff --git a/docs/reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc index edb3f3b5590..009b027b9ef 100644 --- a/docs/reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc @@ -64,7 +64,7 @@ Advance settings include: A custom type mapping table, for example (when configured using `type_table_path`): -[source,js] +[source,type_table] -------------------------------------------------- # Map the $, %, '.', and ',' characters to DIGIT # This might be useful for financial data. @@ -83,4 +83,3 @@ NOTE: Using a tokenizer like the `standard` tokenizer may interfere with the `catenate_*` and `preserve_original` parameters, as the original string may already have lost punctuation during tokenization. Instead, you may want to use the `whitespace` tokenizer. - diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 21bd539a4ea..7176f885831 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -506,8 +506,8 @@ the unit, like `2d` for 2 days. The supported units are: === Byte size units Whenever the byte size of data needs to be specified, eg when setting a buffer size -parameter, the value must specify the unit, like `10kb` for 10 kilobytes. The -supported units are: +parameter, the value must specify the unit, like `10kb` for 10 kilobytes. Note that +these units use powers of 1024, so `1kb` means 1024 bytes. The supported units are: [horizontal] `b`:: Bytes diff --git a/docs/reference/cluster/allocation-explain.asciidoc b/docs/reference/cluster/allocation-explain.asciidoc index 8336cf73a79..8749970aeb2 100644 --- a/docs/reference/cluster/allocation-explain.asciidoc +++ b/docs/reference/cluster/allocation-explain.asciidoc @@ -5,7 +5,7 @@ The purpose of the cluster allocation explain API is to provide explanations for shard allocations in the cluster. For unassigned shards, the explain API provides an explanation for why the shard is unassigned. 
For assigned shards, the explain API provides an explanation for why the -shard is remaining on its current moved and has not moved or rebalanced to +shard is remaining on its current node and has not moved or rebalanced to another node. This API can be very useful when attempting to diagnose why a shard is unassigned or why a shard continues to remain on its current node when you might expect otherwise. diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc index db5804d5984..1e26aac6d61 100644 --- a/docs/reference/docs/delete-by-query.asciidoc +++ b/docs/reference/docs/delete-by-query.asciidoc @@ -1,8 +1,6 @@ [[docs-delete-by-query]] == Delete By Query API -experimental[The delete-by-query API is new and should still be considered experimental. The API may change in ways that are not backwards compatible] - The simplest usage of `_delete_by_query` just performs a deletion on every document that match a query. Here is the API: diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 0207a372fec..bd670fdf84b 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -1,8 +1,6 @@ [[docs-reindex]] == Reindex API -experimental[The reindex API is new and should still be considered experimental. The API may change in ways that are not backwards compatible] - IMPORTANT: Reindex does not attempt to set up the destination index. It does not copy the settings of the source index. You should set up the destination index prior to running a `_reindex` action, including setting up mappings, shard diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index d6acabd87f1..970863041a9 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1788,7 +1788,7 @@ Converts a string to its uppercase equivalent. Expands a field with dots into an object field. This processor allows fields with dots in the name to be accessible by other processors in the pipeline. -Otherwise these < can't be accessed by any processor. +Otherwise these <> can't be accessed by any processor. [[dot-expender-options]] .Dot Expand Options diff --git a/docs/reference/mapping/fields/parent-field.asciidoc b/docs/reference/mapping/fields/parent-field.asciidoc index 9197c7184ea..82343b1a908 100644 --- a/docs/reference/mapping/fields/parent-field.asciidoc +++ b/docs/reference/mapping/fields/parent-field.asciidoc @@ -121,11 +121,17 @@ Global ordinals need to be rebuilt after any change to a shard. The more parent id values are stored in a shard, the longer it takes to rebuild the global ordinals for the `_parent` field. -Global ordinals, by default, are built lazily: the first parent-child query or -aggregation after a refresh will trigger building of global ordinals. This can -introduce a significant latency spike for your users. You can use -<> to shift the cost of building global -ordinals from query time to refresh time, by mapping the `_parent` field as follows: +Global ordinals, by default, are built eagerly: if the index has changed, +global ordinals for the `_parent` field will be rebuilt as part of the refresh. +This can add significant time to the refresh. However, most of the time this is the +right trade-off; otherwise global ordinals are rebuilt when the first parent-child +query or aggregation is used. This can introduce a significant latency spike for
This can introduce a significant latency spike for +your users and usually this is worse as multiple global ordinals for the `_parent` +field may be attempt rebuilt within a single refresh interval when many writes +are occurring. + +When the parent/child is used infrequently and writes occur frequently it may +make sense to disable eager loading: [source,js] -------------------------------------------------- @@ -136,7 +142,7 @@ PUT my_index "my_child": { "_parent": { "type": "my_parent", - "eager_global_ordinals": true + "eager_global_ordinals": false } } } diff --git a/docs/reference/migration/migrate_6_0/scripting.asciidoc b/docs/reference/migration/migrate_6_0/scripting.asciidoc index 8a1eeaf6392..67402009223 100644 --- a/docs/reference/migration/migrate_6_0/scripting.asciidoc +++ b/docs/reference/migration/migrate_6_0/scripting.asciidoc @@ -8,7 +8,7 @@ Use painless instead. ==== Date fields now return dates -`doc.some_date_field.value` now returns `ReadableDateTime`s instead of +`doc.some_date_field.value` now returns ++ReadableDateTime++s instead of milliseconds since epoch as a `long`. The same is true for `doc.some_date_field[some_number]`. Use `doc.some_date_field.value.millis` to fetch the milliseconds since epoch if you need it. diff --git a/docs/reference/modules/indices/request_cache.asciidoc b/docs/reference/modules/indices/request_cache.asciidoc index 22c203b4865..e3896f718d9 100644 --- a/docs/reference/modules/indices/request_cache.asciidoc +++ b/docs/reference/modules/indices/request_cache.asciidoc @@ -42,7 +42,7 @@ The cache can be expired manually with the <> and <>; this can be useful for configuring a single instance to be reachable via -HTTP for testing purposes without triggering production mode. +HTTP for testing purposes without triggering production mode. If you do +want to force enforcement of the bootstrap checks independent of the +binding of the transport protocal, you can set the system property +`es.enforce.bootstrap.checks` to `true` (this can be useful on a +single-node production system that does not bind transport to an external +interface). 
=== Heap size check diff --git a/docs/reference/setup/install/deb.asciidoc b/docs/reference/setup/install/deb.asciidoc index ca121976145..c73ed327e90 100644 --- a/docs/reference/setup/install/deb.asciidoc +++ b/docs/reference/setup/install/deb.asciidoc @@ -114,7 +114,7 @@ endif::[] ifeval::["{release-state}"!="unreleased"] -The Debian package for Elastisearch v{version} can be downloaded from the website and installed as follows: +The Debian package for Elasticsearch v{version} can be downloaded from the website and installed as follows: ["source","sh",subs="attributes"] -------------------------------------------- diff --git a/docs/reference/setup/install/rpm.asciidoc b/docs/reference/setup/install/rpm.asciidoc index b737fbff127..269eb6720e6 100644 --- a/docs/reference/setup/install/rpm.asciidoc +++ b/docs/reference/setup/install/rpm.asciidoc @@ -99,7 +99,7 @@ endif::[] ifeval::["{release-state}"!="unreleased"] -The RPM for Elastisearch v{version} can be downloaded from the website and installed as follows: +The RPM for Elasticsearch v{version} can be downloaded from the website and installed as follows: ["source","sh",subs="attributes"] -------------------------------------------- diff --git a/docs/reference/setup/sysconfig/swap.asciidoc b/docs/reference/setup/sysconfig/swap.asciidoc index 19b6f751ee7..78ca7d40bee 100644 --- a/docs/reference/setup/sysconfig/swap.asciidoc +++ b/docs/reference/setup/sysconfig/swap.asciidoc @@ -33,7 +33,7 @@ After starting Elasticsearch, you can see whether this setting was applied successfully by checking the value of `mlockall` in the output from this request: -[source,sh] +[source,js] -------------- GET _nodes?filter_path=**.mlockall -------------- diff --git a/docs/src/test/cluster/config/analysis/stemmer_override.txt b/docs/src/test/cluster/config/analysis/stemmer_override.txt new file mode 100644 index 00000000000..6f6cd771cf5 --- /dev/null +++ b/docs/src/test/cluster/config/analysis/stemmer_override.txt @@ -0,0 +1,3 @@ +running => run + +stemmer => stemmer diff --git a/docs/src/test/cluster/config/analysis/synonym.txt b/docs/src/test/cluster/config/analysis/synonym.txt new file mode 100644 index 00000000000..b51d975d8f5 --- /dev/null +++ b/docs/src/test/cluster/config/analysis/synonym.txt @@ -0,0 +1,31 @@ +# Blank lines and lines starting with pound are comments. + +# Explicit mappings match any token sequence on the LHS of "=>" +# and replace with all alternatives on the RHS. These types of mappings +# ignore the expand parameter in the schema. +# Examples: +i-pod, i pod => ipod, +sea biscuit, sea biscit => seabiscuit + +# Equivalent synonyms may be separated with commas and give +# no explicit mapping. In this case the mapping behavior will +# be taken from the expand parameter in the schema. This allows +# the same synonym file to be used in different synonym handling strategies. +# Examples: +ipod, i-pod, i pod +foozball , foosball +universe , cosmos +lol, laughing out loud + +# If expand==true, "ipod, i-pod, i pod" is equivalent +# to the explicit mapping: +ipod, i-pod, i pod => ipod, i-pod, i pod +# If expand==false, "ipod, i-pod, i pod" is equivalent +# to the explicit mapping: +ipod, i-pod, i pod => ipod + +# Multiple synonym mapping entries are merged. 
+foo => foo bar +foo => baz +# is equivalent to +foo => foo bar, baz diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 index f60d48efcc7..6ab6a861135 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 @@ -120,7 +120,7 @@ INTEGER: ( '0' | [1-9] [0-9]* ) [lLfFdD]?; DECIMAL: ( '0' | [1-9] [0-9]* ) (DOT [0-9]+)? ( [eE] [+\-]? [0-9]+ )? [fFdD]?; STRING: ( '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"' ) | ( '\'' ( '\\\'' | '\\\\' | ~[\\'] )*? '\'' ); -REGEX: '/' ( ~('/' | '\n') | '\\' ~'\n' )+ '/' [cilmsUux]* { slashIsRegex() }?; +REGEX: '/' ( '\\' ~'\n' | ~('/' | '\n') )+? '/' [cilmsUux]* { slashIsRegex() }?; TRUE: 'true'; FALSE: 'false'; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java index 44972061b59..fd32c59b4ff 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java @@ -1,7 +1,5 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.painless.antlr; - - import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.Token; @@ -211,29 +209,29 @@ abstract class PainlessLexer extends Lexer { "\3N\3N\7N\u021a\nN\fN\16N\u021d\13N\3N\3N\3O\3O\3O\3O\3O\3P\3P\3P\3P\3"+ "P\3P\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\7R\u0235\nR\fR\16R\u0238\13R\3R\3R\3S"+ "\3S\7S\u023e\nS\fS\16S\u0241\13S\3T\3T\3T\7T\u0246\nT\fT\16T\u0249\13"+ - "T\5T\u024b\nT\3T\3T\3U\3U\7U\u0251\nU\fU\16U\u0254\13U\3U\3U\6\u00b9\u00c3"+ - "\u01fd\u0209\2V\4\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32"+ - "\16\34\17\36\20 \21\"\22$\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66"+ - "\348\35:\36<\37> @!B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64"+ - "h\65j\66l\67n8p9r:t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008a"+ - "F\u008cG\u008eH\u0090I\u0092J\u0094K\u0096L\u0098M\u009aN\u009cO\u009e"+ - "P\u00a0Q\u00a2R\u00a4S\u00a6T\u00a8U\u00aaV\4\2\3\25\5\2\13\f\17\17\""+ - "\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b"+ - "\2FFHHNNffhhnn\4\2GGgg\4\2--//\6\2FFHHffhh\4\2$$^^\4\2))^^\4\2\f\f\61"+ - "\61\3\2\f\f\t\2WWeekknouuwwzz\5\2C\\aac|\6\2\62;C\\aac|\u0277\2\4\3\2"+ - "\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2\2\20"+ - "\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32\3\2"+ - "\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3"+ - "\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62\3"+ - "\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2>\3"+ - "\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2"+ - "\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2\2"+ - "X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2\2d\3"+ - "\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2"+ - "\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2"+ - "~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086\3\2"+ - "\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2\2\2"+ - "\u0090\3\2\2\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0096\3\2\2\2\2\u0098"+ + "T\5T\u024b\nT\3T\3T\3U\3U\7U\u0251\nU\fU\16U\u0254\13U\3U\3U\7\u00b9\u00c3"+ + 
"\u01fd\u0209\u0215\2V\4\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30"+ + "\r\32\16\34\17\36\20 \21\"\22$\23&\24(\25*\26,\27.\30\60\31\62\32\64\33"+ + "\66\348\35:\36<\37> @!B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63"+ + "f\64h\65j\66l\67n8p9r:t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E"+ + "\u008aF\u008cG\u008eH\u0090I\u0092J\u0094K\u0096L\u0098M\u009aN\u009c"+ + "O\u009eP\u00a0Q\u00a2R\u00a4S\u00a6T\u00a8U\u00aaV\4\2\3\25\5\2\13\f\17"+ + "\17\"\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2"+ + "\62;\b\2FFHHNNffhhnn\4\2GGgg\4\2--//\6\2FFHHffhh\4\2$$^^\4\2))^^\3\2\f"+ + "\f\4\2\f\f\61\61\t\2WWeekknouuwwzz\5\2C\\aac|\6\2\62;C\\aac|\u0277\2\4"+ + "\3\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2"+ + "\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32"+ + "\3\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2"+ + "&\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62"+ + "\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2"+ + ">\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3"+ + "\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2"+ + "\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2"+ + "\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p"+ + "\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2"+ + "\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086"+ + "\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2"+ + "\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0096\3\2\2\2\2\u0098"+ "\3\2\2\2\2\u009a\3\2\2\2\2\u009c\3\2\2\2\2\u009e\3\2\2\2\2\u00a0\3\2\2"+ "\2\2\u00a2\3\2\2\2\2\u00a4\3\2\2\2\2\u00a6\3\2\2\2\3\u00a8\3\2\2\2\3\u00aa"+ "\3\2\2\2\4\u00ad\3\2\2\2\6\u00c8\3\2\2\2\b\u00cc\3\2\2\2\n\u00ce\3\2\2"+ @@ -358,9 +356,9 @@ abstract class PainlessLexer extends Lexer { "\3\2\2\2\u0207\u0206\3\2\2\2\u0208\u020b\3\2\2\2\u0209\u020a\3\2\2\2\u0209"+ "\u0207\3\2\2\2\u020a\u020c\3\2\2\2\u020b\u0209\3\2\2\2\u020c\u020e\7)"+ "\2\2\u020d\u01f5\3\2\2\2\u020d\u0201\3\2\2\2\u020e\u009b\3\2\2\2\u020f"+ - "\u0213\7\61\2\2\u0210\u0214\n\20\2\2\u0211\u0212\7^\2\2\u0212\u0214\n"+ - "\21\2\2\u0213\u0210\3\2\2\2\u0213\u0211\3\2\2\2\u0214\u0215\3\2\2\2\u0215"+ - "\u0213\3\2\2\2\u0215\u0216\3\2\2\2\u0216\u0217\3\2\2\2\u0217\u021b\7\61"+ + "\u0213\7\61\2\2\u0210\u0211\7^\2\2\u0211\u0214\n\20\2\2\u0212\u0214\n"+ + "\21\2\2\u0213\u0210\3\2\2\2\u0213\u0212\3\2\2\2\u0214\u0215\3\2\2\2\u0215"+ + "\u0216\3\2\2\2\u0215\u0213\3\2\2\2\u0216\u0217\3\2\2\2\u0217\u021b\7\61"+ "\2\2\u0218\u021a\t\22\2\2\u0219\u0218\3\2\2\2\u021a\u021d\3\2\2\2\u021b"+ "\u0219\3\2\2\2\u021b\u021c\3\2\2\2\u021c\u021e\3\2\2\2\u021d\u021b\3\2"+ "\2\2\u021e\u021f\6N\3\2\u021f\u009d\3\2\2\2\u0220\u0221\7v\2\2\u0221\u0222"+ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java index 1d1f41948c4..4b38868b1b1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java @@ -68,8 +68,9 @@ public final class ERegex extends AExpression { try { Pattern.compile(pattern, flags); - } catch (PatternSyntaxException exception) { - throw createError(exception); + } catch (PatternSyntaxException e) { + throw new Location(location.getSourceName(), location.getOffset() + 1 + 
e.getIndex()).createError( + new IllegalArgumentException("Error compiling regex: " + e.getDescription())); } constant = new Constant(location, Definition.PATTERN_TYPE.type, "regexAt$" + location.getOffset(), this::initializeConstant); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java index 1c53692ad74..83a592b3f26 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.ScriptException; import java.nio.CharBuffer; import java.util.Arrays; @@ -44,8 +45,17 @@ public class RegexTests extends ScriptTestCase { assertEquals(false, exec("return 'bar' ==~ /foo/")); } - public void testSlashesEscapePattern() { - assertEquals(true, exec("return '//' ==~ /\\/\\//")); + public void testBackslashEscapesForwardSlash() { + assertEquals(true, exec("'//' ==~ /\\/\\//")); + } + + public void testBackslashEscapeBackslash() { + // Both of these are single backslashes but java escaping + Painless escaping.... + assertEquals(true, exec("'\\\\' ==~ /\\\\/")); + } + + public void testRegexIsNonGreedy() { + assertEquals(true, exec("def s = /\\\\/.split('.\\\\.'); return s[1] ==~ /\\./")); } public void testPatternAfterAssignment() { @@ -248,11 +258,14 @@ public class RegexTests extends ScriptTestCase { } public void testBadRegexPattern() { - PatternSyntaxException e = expectScriptThrows(PatternSyntaxException.class, () -> { + ScriptException e = expectThrows(ScriptException.class, () -> { exec("/\\ujjjj/"); // Invalid unicode }); - assertThat(e.getMessage(), containsString("Illegal Unicode escape sequence near index 2")); - assertThat(e.getMessage(), containsString("\\ujjjj")); + assertEquals("Error compiling regex: Illegal Unicode escape sequence", e.getCause().getMessage()); + + // And make sure the location of the error points to the offset inside the pattern + assertEquals("/\\ujjjj/", e.getScriptStack().get(0)); + assertEquals(" ^---- HERE", e.getScriptStack().get(1)); } public void testRegexAgainstNumber() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 244091595b3..cd702784ee5 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; @@ -52,6 +53,7 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; import 
org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoostingQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; @@ -244,9 +246,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { public void testPercolatorFieldMapper() throws Exception { addQueryMapping(); QueryBuilder queryBuilder = termQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() - .field(fieldName, queryBuilder) - .endObject().bytes()); + ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory + .jsonBuilder() + .startObject() + .field(fieldName, queryBuilder) + .endObject().bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value")); @@ -259,9 +264,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { // add an query for which we don't extract terms from queryBuilder = rangeQuery("field").from("a").to("z"); - doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() + doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory + .jsonBuilder() + .startObject() .field(fieldName, queryBuilder) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), equalTo(EXTRACTION_FAILED)); @@ -282,10 +290,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { // (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex) for (QueryBuilder query : queries) { - ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory.jsonBuilder().startObject() .field(fieldName, query) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, query); } @@ -295,9 +304,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { addQueryMapping(); client().prepareIndex("remote", "type", "1").setSource("field", "value").get(); QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "type", "1", "field")); - ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory + .jsonBuilder() + .startObject() .field(fieldName, queryBuilder) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, queryBuilder.rewrite(indexService.newQueryShardContext( randomInt(20), null, () -> { throw new UnsupportedOperationException(); }))); @@ -307,9 +319,12 @@ public class 
PercolatorFieldMapperTests extends ESSingleNodeTestCase { public void testPercolatorFieldMapperUnMappedField() throws Exception { addQueryMapping(); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory + .jsonBuilder() + .startObject() .field(fieldName, termQuery("unmapped_field", "value")) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); }); assertThat(exception.getCause(), instanceOf(QueryShardException.class)); assertThat(exception.getCause().getMessage(), equalTo("No field mapping can be found for the field with name [unmapped_field]")); @@ -318,14 +333,21 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { public void testPercolatorFieldMapper_noQuery() throws Exception { addQueryMapping(); - ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() - .endObject().bytes()); + ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory + .jsonBuilder() + .startObject() + .endObject() + .bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); try { - mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() - .nullField(fieldName) - .endObject().bytes()); + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory + .jsonBuilder() + .startObject() + .nullField(fieldName) + .endObject().bytes(), + XContentType.JSON)); } catch (MapperParsingException e) { assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object")); } @@ -357,12 +379,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field("query_field1", queryBuilder) .field("query_field2", queryBuilder) - .endObject().bytes() - ); + .endObject().bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); @@ -388,35 +410,35 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject().startObject("object_field") .field("query_field", queryBuilder) - .endObject().endObject().bytes() - ); + .endObject().endObject().bytes(), + 
XContentType.JSON)); assertThat(doc.rootDoc().getFields().size(), equalTo(11)); // also includes all other meta fields BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); - doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .startArray("object_field") .startObject().field("query_field", queryBuilder).endObject() .endArray() - .endObject().bytes() - ); + .endObject().bytes(), + XContentType.JSON)); assertThat(doc.rootDoc().getFields().size(), equalTo(11)); // also includes all other meta fields queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .startArray("object_field") .startObject().field("query_field", queryBuilder).endObject() .startObject().field("query_field", queryBuilder).endObject() .endArray() - .endObject().bytes() - ); + .endObject().bytes(), + XContentType.JSON)); } ); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); @@ -426,42 +448,47 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { public void testRangeQueryWithNowRangeIsForbidden() throws Exception { addQueryMapping(); MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").from("2016-01-01||/D").to("now")) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); } ); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").from("2016-01-01||/D").to("now/D")) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); } ); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").from("now-1d").to("now")) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); } ); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").from("now")) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); } ); assertThat(e.getCause(), 
instanceOf(IllegalArgumentException.class)); e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").to("now")) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); } ); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); @@ -471,31 +498,39 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { public void testVerifyRangeQueryWithNullBounds() throws Exception { addQueryMapping(); MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").from("now").to(null)) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); + } ); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").from(null).to("now")) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); + } ); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); // No validation failures: - mapperService.documentMapper(typeName).parse("test", typeName, "1", + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").from("2016-01-01").to(null)) - .endObject().bytes()); - mapperService.documentMapper(typeName).parse("test", typeName, "1", + .endObject().bytes(), + XContentType.JSON)); + + mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", jsonBuilder().startObject() .field(fieldName, rangeQuery("date_field").from(null).to("2016-01-01")) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); + } public void testUnsupportedQueries() { @@ -570,10 +605,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { query.endObject(); query.endObject(); - ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory.jsonBuilder().startObject() .rawField(fieldName, new BytesArray(query.string()), query.contentType()) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); Map parsedQuery = XContentHelper.convertToMap(new BytesArray(querySource), true).v2(); assertEquals(Script.DEFAULT_SCRIPT_LANG, XContentMapValues.extractValue("script.script.lang", parsedQuery)); @@ -597,10 +633,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { query.endObject(); query.endObject(); - doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", XContentFactory.jsonBuilder().startObject() .rawField(fieldName, new 
BytesArray(query.string()), query.contentType()) - .endObject().bytes()); + .endObject().bytes(), + XContentType.JSON)); querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); parsedQuery = XContentHelper.convertToMap(new BytesArray(querySource), true).v2(); assertEquals(Script.DEFAULT_SCRIPT_LANG, diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index 796106c269e..6781da64972 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -141,15 +141,18 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { } @Override - protected void cleanup() { - /* This is called on the RestClient's thread pool and attempting to close the client on its own threadpool causes it to fail to - * close. So we always shutdown the RestClient asynchronously on a thread in Elasticsearch's generic thread pool. */ + protected void cleanup(Runnable onCompletion) { + /* This is called on the RestClient's thread pool and attempting to close the client on its + * own threadpool causes it to fail to close. So we always shutdown the RestClient + * asynchronously on a thread in Elasticsearch's generic thread pool. */ threadPool.generic().submit(() -> { try { client.close(); logger.debug("Shut down remote connection"); } catch (IOException e) { logger.error("Failed to shutdown the remote connection", e); + } finally { + onCompletion.run(); } }); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 7376ed54349..eb7abea6af5 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -80,7 +80,9 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class RemoteScrollableHitSourceTests extends ESTestCase { @@ -478,6 +480,25 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { e.getCause().getCause().getCause().getMessage()); } + public void testCleanupSuccessful() throws Exception { + AtomicBoolean cleanupCallbackCalled = new AtomicBoolean(); + RestClient client = mock(RestClient.class); + TestRemoteScrollableHitSource hitSource = new TestRemoteScrollableHitSource(client); + hitSource.cleanup(() -> cleanupCallbackCalled.set(true)); + verify(client).close(); + assertTrue(cleanupCallbackCalled.get()); + } + + public void testCleanupFailure() throws Exception { + AtomicBoolean cleanupCallbackCalled = new AtomicBoolean(); + RestClient client = mock(RestClient.class); + doThrow(new RuntimeException("test")).when(client).close(); + TestRemoteScrollableHitSource hitSource = new TestRemoteScrollableHitSource(client); + hitSource.cleanup(() -> cleanupCallbackCalled.set(true)); + verify(client).close(); + 
assertTrue(cleanupCallbackCalled.get()); + } + private RemoteScrollableHitSource sourceWithMockedRemoteCall(String... paths) throws Exception { return sourceWithMockedRemoteCall(true, ContentType.APPLICATION_JSON, paths); } diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java index 314e195754a..7c15dac212e 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java @@ -23,6 +23,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -37,6 +39,7 @@ import org.junit.Before; import java.io.BufferedWriter; import java.io.IOException; +import java.net.InetAddress; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; @@ -85,7 +88,15 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { BigArrays.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService(), new NamedWriteableRegistry(Collections.emptyList()), - new NetworkService(Settings.EMPTY, Collections.emptyList())); + new NetworkService(Settings.EMPTY, Collections.emptyList())) { + @Override + public BoundTransportAddress boundAddress() { + return new BoundTransportAddress( + new TransportAddress[]{new TransportAddress(InetAddress.getLoopbackAddress(), 9300)}, + new TransportAddress(InetAddress.getLoopbackAddress(), 9300) + ); + } + }; transportService = new MockTransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/util/Access.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/util/Access.java index ae96dee64c4..40136f8fc01 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/util/Access.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/util/Access.java @@ -22,27 +22,29 @@ package org.elasticsearch.cloud.gce.util; import org.elasticsearch.SpecialPermission; import java.io.IOException; +import java.net.SocketPermission; import java.security.AccessController; import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; /** - * GCE's http client changes access levels. Specifically it needs {@link RuntimePermission} accessDeclaredMembers and - * setFactory and {@link java.lang.reflect.ReflectPermission} suppressAccessChecks. For remote calls the plugin needs - * SocketPermissions for 'connect'. This class wraps the operations requiring access in + * GCE's HTTP client changes access levels. Specifically it needs {@link RuntimePermission} {@code + * accessDeclaredMembers} and {@code setFactory}, and {@link java.lang.reflect.ReflectPermission} + * {@code suppressAccessChecks}. 
For remote calls, the plugin needs {@link SocketPermission} for + * {@code connect}. This class wraps the operations requiring access in * {@link AccessController#doPrivileged(PrivilegedAction)} blocks. */ public final class Access { private Access() {} - public static <T> T doPrivileged(PrivilegedAction<T> operation) { + public static <T> T doPrivileged(final PrivilegedAction<T> operation) { SpecialPermission.check(); return AccessController.doPrivileged(operation); } - public static void doPrivilegedVoid(Runnable action) { + public static void doPrivilegedVoid(final Runnable action) { SpecialPermission.check(); AccessController.doPrivileged((PrivilegedAction<Void>) () -> { action.run(); @@ -50,12 +52,14 @@ public final class Access { }); } - public static <T> T doPrivilegedIOException(PrivilegedExceptionAction<T> operation) throws IOException { + public static <T> T doPrivilegedIOException(final PrivilegedExceptionAction<T> operation) + throws IOException { SpecialPermission.check(); try { return AccessController.doPrivileged(operation); - } catch (PrivilegedActionException e) { + } catch (final PrivilegedActionException e) { throw (IOException) e.getCause(); } } + } diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index c7ffe4f287f..3c0f3b0433c 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -47,7 +47,9 @@ import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.ProtectionDomain; import java.security.SecurityPermission; +import java.util.Arrays; import java.util.Collections; +import java.util.LinkedHashSet; import java.util.PropertyPermission; import java.util.Set; @@ -128,7 +130,12 @@ final class TikaImpl { addReadPermissions(perms, JarHell.parseClassPath()); // plugin jars if (TikaImpl.class.getClassLoader() instanceof URLClassLoader) { - addReadPermissions(perms, ((URLClassLoader)TikaImpl.class.getClassLoader()).getURLs()); + URL[] urls = ((URLClassLoader)TikaImpl.class.getClassLoader()).getURLs(); + Set<URL> set = new LinkedHashSet<>(Arrays.asList(urls)); + if (set.size() != urls.length) { + throw new AssertionError("duplicate jars: " + Arrays.toString(urls)); + } + addReadPermissions(perms, set); } // jvm's java.io.tmpdir (needs read/write) perms.add(new FilePermission(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "-", @@ -145,7 +152,7 @@ final class TikaImpl { // add resources to (what is typically) a jar, but might not be (e.g. 
in tests/IDE) @SuppressForbidden(reason = "adds access to jar resources") - static void addReadPermissions(Permissions perms, URL resources[]) { + static void addReadPermissions(Permissions perms, Set resources) { try { for (URL url : resources) { Path path = PathUtils.get(url.toURI()); diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index 325ac726713..e16a8f05203 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -27,11 +27,13 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.plugins.Plugin; @@ -78,7 +80,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { .field("type", "murmur3") .endObject().endObject().endObject().endObject().string(); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDoc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes()); + ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject().bytes(), + XContentType.JSON)); IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); assertNotNull(fields); assertEquals(Arrays.toString(fields), 1, fields.length); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java new file mode 100644 index 00000000000..79c3f9248b5 --- /dev/null +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.bootstrap; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matcher; +import org.junit.After; +import org.junit.Before; + +import java.util.Collections; +import java.util.List; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.bootstrap.BootstrapChecks.ES_ENFORCE_BOOTSTRAP_CHECKS; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class EvilBootstrapChecksTests extends ESTestCase { + + private String esEnforceBootstrapChecks = System.getProperty(ES_ENFORCE_BOOTSTRAP_CHECKS); + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + } + + @Override + @After + public void tearDown() throws Exception { + setEsEnforceBootstrapChecks(esEnforceBootstrapChecks); + super.tearDown(); + } + + public void testEnforceBootstrapChecks() throws NodeValidationException { + setEsEnforceBootstrapChecks("true"); + final List<BootstrapCheck> checks = Collections.singletonList( + new BootstrapCheck() { + @Override + public boolean check() { + return true; + } + + @Override + public String errorMessage() { + return "error"; + } + } + ); + final Logger logger = mock(Logger.class); + + final NodeValidationException e = expectThrows( + NodeValidationException.class, + () -> BootstrapChecks.check(false, checks, logger)); + final Matcher<String> allOf = + allOf(containsString("bootstrap checks failed"), containsString("error")); + assertThat(e, hasToString(allOf)); + verify(logger).info("explicitly enforcing bootstrap checks"); + verifyNoMoreInteractions(logger); + } + + public void testNonEnforcedBootstrapChecks() throws NodeValidationException { + setEsEnforceBootstrapChecks(null); + final Logger logger = mock(Logger.class); + // nothing should happen + BootstrapChecks.check(false, emptyList(), logger); + verifyNoMoreInteractions(logger); + } + + public void testInvalidValue() { + final String value = randomAsciiOfLength(8); + setEsEnforceBootstrapChecks(value); + final boolean enforceLimits = randomBoolean(); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> BootstrapChecks.check(enforceLimits, emptyList(), "testInvalidValue")); + final Matcher<String> matcher = containsString( + "[es.enforce.bootstrap.checks] must be [true] but was [" + value + "]"); + assertThat(e, hasToString(matcher)); + } + + @SuppressForbidden(reason = "set or clear system property es.enforce.bootstrap.checks") + public void setEsEnforceBootstrapChecks(final String value) { + if (value == null) { + System.clearProperty(ES_ENFORCE_BOOTSTRAP_CHECKS); + } else { + System.setProperty(ES_ENFORCE_BOOTSTRAP_CHECKS, value); + } + } + +} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilJNANativesTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilJNANativesTests.java index 069f7e1aeff..eb679df9f6a 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilJNANativesTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilJNANativesTests.java @@ -33,14 +33,18 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class EvilJNANativesTests extends 
ESTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/23640") public void testSetMaximumNumberOfThreads() throws IOException { if (Constants.LINUX) { final List lines = Files.readAllLines(PathUtils.get("/proc/self/limits")); if (!lines.isEmpty()) { - for (String line : lines) { + for (final String line : lines) { if (line != null && line.startsWith("Max processes")) { final String[] fields = line.split("\\s+"); - final long limit = "unlimited".equals(fields[2]) ? JNACLibrary.RLIM_INFINITY : Long.parseLong(fields[2]); + final long limit = + "unlimited".equals(fields[2]) + ? JNACLibrary.RLIM_INFINITY + : Long.parseLong(fields[2]); assertThat(JNANatives.MAX_NUMBER_OF_THREADS, equalTo(limit)); return; } @@ -52,22 +56,27 @@ public class EvilJNANativesTests extends ESTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/23640") public void testSetMaxSizeVirtualMemory() throws IOException { if (Constants.LINUX) { final List lines = Files.readAllLines(PathUtils.get("/proc/self/limits")); if (!lines.isEmpty()) { - for (String line : lines) { + for (final String line : lines) { if (line != null && line.startsWith("Max address space")) { final String[] fields = line.split("\\s+"); final String limit = fields[3]; - assertEquals(JNANatives.rlimitToString(JNANatives.MAX_SIZE_VIRTUAL_MEMORY), limit); + assertThat( + JNANatives.rlimitToString(JNANatives.MAX_SIZE_VIRTUAL_MEMORY), + equalTo(limit)); return; } } } fail("should have read max size virtual memory from /proc/self/limits"); } else if (Constants.MAC_OS_X) { - assertThat(JNANatives.MAX_SIZE_VIRTUAL_MEMORY, anyOf(equalTo(Long.MIN_VALUE), greaterThanOrEqualTo(0L))); + assertThat( + JNANatives.MAX_SIZE_VIRTUAL_MEMORY, + anyOf(equalTo(Long.MIN_VALUE), greaterThanOrEqualTo(0L))); } else { assertThat(JNANatives.MAX_SIZE_VIRTUAL_MEMORY, equalTo(Long.MIN_VALUE)); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 8d99ab8e89d..732852ca153 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -595,7 +595,7 @@ public class InstallPluginCommandTests extends ESTestCase { stream.putNextEntry(new ZipEntry("elasticsearch/../blah")); } String pluginZip = zip.toUri().toURL().toString(); - IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("resolving outside of plugin directory")); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index 4412d19394f..10918eea189 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -25,18 +25,15 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; import java.util.stream.Collectors; import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.Version; import org.elasticsearch.cli.ExitCodes; import 
org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.inject.spi.HasDependencies; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.Version; import org.junit.Before; @LuceneTestCase.SuppressFileSystems("*") @@ -155,4 +152,26 @@ public class ListPluginsCommandTests extends ESTestCase { env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString() + "]"); } + public void testExistingIncompatiblePlugin() throws Exception { + PluginTestUtil.writeProperties(env.pluginsFile().resolve("fake_plugin1"), + "description", "fake desc 1", + "name", "fake_plugin1", + "version", "1.0", + "elasticsearch.version", Version.fromString("1.0.0").toString(), + "java.version", System.getProperty("java.specification.version"), + "classname", "org.fake1"); + buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); + + MockTerminal terminal = listPlugins(home); + assertEquals("fake_plugin1\n" + + "WARNING: Plugin [fake_plugin1] is incompatible with Elasticsearch [" + + Version.CURRENT.toString() + "]. Was designed for version [1.0.0]\n" + + "fake_plugin2\n", + terminal.getOutput()); + + String[] params = {"-s"}; + terminal = listPlugins(home, params); + assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput()); + } + } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index 997fbeffadd..a42e66fe872 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -79,7 +79,7 @@ public class RemovePluginCommandTests extends ESTestCase { public void testMissing() throws Exception { UserException e = expectThrows(UserException.class, () -> removePlugin("dne", home)); - assertTrue(e.getMessage(), e.getMessage().contains("plugin dne not found")); + assertTrue(e.getMessage(), e.getMessage().contains("plugin [dne] not found")); assertRemoveCleaned(env); } @@ -136,7 +136,7 @@ public class RemovePluginCommandTests extends ESTestCase { public void testRemoveUninstalledPluginErrors() throws Exception { UserException e = expectThrows(UserException.class, () -> removePlugin("fake", home)); assertEquals(ExitCodes.CONFIG, e.exitCode); - assertEquals("plugin fake not found; run 'elasticsearch-plugin list' to get list of installed plugins", e.getMessage()); + assertEquals("plugin [fake] not found; run 'elasticsearch-plugin list' to get list of installed plugins", e.getMessage()); MockTerminal terminal = new MockTerminal(); new RemovePluginCommand() { @@ -146,8 +146,8 @@ public class RemovePluginCommandTests extends ESTestCase { } }.main(new String[] { "-Epath.home=" + home, "fake" }, terminal); try (BufferedReader reader = new BufferedReader(new StringReader(terminal.getOutput()))) { - assertEquals("-> Removing fake...", reader.readLine()); - assertEquals("ERROR: plugin fake not found; run 'elasticsearch-plugin list' to get list of installed plugins", + assertEquals("-> removing [fake]...", reader.readLine()); + assertEquals("ERROR: plugin [fake] not found; run 'elasticsearch-plugin list' to get list of installed plugins", reader.readLine()); assertNull(reader.readLine()); } @@ -160,7 +160,7 @@ public class RemovePluginCommandTests extends ESTestCase { } private String expectedConfigDirPreservedMessage(final Path 
configDir) { - return "-> Preserving plugin config files [" + configDir + "] in case of upgrade, delete manually if not needed"; + return "-> preserving plugin config files [" + configDir + "] in case of upgrade; delete manually if not needed"; } } diff --git a/qa/vagrant/src/test/resources/packaging/utils/utils.bash b/qa/vagrant/src/test/resources/packaging/utils/utils.bash index 143430a542f..f8abe1b5266 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/utils.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/utils.bash @@ -435,7 +435,7 @@ wait_for_elasticsearch_status() { if [ $? -eq 0 ]; then echo "Connected" else - echo "Unable to connect to Elastisearch" + echo "Unable to connect to Elasticsearch" false fi diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index b76d560bcb4..8ed3202e9af 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -146,7 +146,7 @@ }, "wait_for_completion": { "type" : "boolean", - "default": false, + "default": true, "description" : "Should the request block until the delete-by-query is complete." }, "requests_per_second": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json index 31a50c4a8c4..1f24199fad4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json @@ -50,6 +50,10 @@ "type" : "boolean", "description" : "Clear the recycler cache" }, + "request_cache": { + "type" : "boolean", + "description" : "Clear request cache" + }, "request": { "type" : "boolean", "description" : "Clear request cache" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json index 7a0977da194..3fec822f1d9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json @@ -66,6 +66,10 @@ "rewrite": { "type": "boolean", "description": "Provide a more detailed explanation showing the actual Lucene query that will be executed." + }, + "all_shards": { + "type": "boolean", + "description": "Execute validation on all shards instead of one random shard per index." } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json index 4756361ca16..fc701c29d60 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json @@ -22,7 +22,7 @@ }, "wait_for_completion": { "type" : "boolean", - "default": false, + "default": true, "description" : "Should the request block until the reindex is complete." 
}, "requests_per_second": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index 8130be8a11f..072e950686a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -154,7 +154,7 @@ }, "wait_for_completion": { "type" : "boolean", - "default": false, + "default": true, "description" : "Should the request should block until the update by query operation is complete." }, "requests_per_second": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yaml index def91f42807..e25626cf3ae 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yaml @@ -11,6 +11,8 @@ - do: count: + # we count through the primary in case there is a replica that has not yet fully recovered + preference: _primary index: test_index - match: {count: 2} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yaml index 3388d06ba17..68bb11c42ba 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yaml @@ -2,3 +2,26 @@ "clear_cache test": - do: indices.clear_cache: {} + +--- +"clear_cache with request set to false": + - skip: + version: " - 5.4.99" + reason: this name was added in 5.4 - temporarilly skipping 5.4 until snapshot is finished + + - do: + indices.clear_cache: + request: false + +--- +"clear_cache with request_cache set to false": + - skip: + version: " - 5.4.99" + reason: request_cache was deprecated in 5.4.0 - temporarilly skipping 5.4 until snapshot is finished + features: "warnings" + + - do: + warnings: + - 'Deprecated field [request_cache] used, expected [request] instead' + indices.clear_cache: + request_cache: false diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yaml index bff9a169604..71eb5665bea 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yaml @@ -43,7 +43,7 @@ filter_path: "took" body: "{ \"query\": { \"match_all\": {} } }" - - is_true: took + - gte: { took: 0 } - is_false: _shards.total - is_false: hits.total - is_false: hits.hits.0._index diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index fe833470ad2..4ab45d8c1a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -182,7 +182,7 @@ public class BootstrapForTesting { } // compute classpath minus obvious places, all other jars will get the permission. 
- Set<URL> codebases = new HashSet<>(Arrays.asList(parseClassPathWithSymlinks())); + Set<URL> codebases = new HashSet<>(parseClassPathWithSymlinks()); Set<URL> excluded = new HashSet<>(Arrays.asList( // es core Bootstrap.class.getProtectionDomain().getCodeSource().getLocation(), @@ -200,7 +200,7 @@ // parse each policy file, with codebase substitution from the classpath final List<Policy> policies = new ArrayList<>(); for (URL policyFile : pluginPolicies) { - policies.add(Security.readPolicy(policyFile, codebases.toArray(new URL[codebases.size()]))); + policies.add(Security.readPolicy(policyFile, codebases)); } // consult each policy file for those codebases @@ -227,10 +227,14 @@ * this is for matching the toRealPath() in the code where we have a proper plugin structure */ @SuppressForbidden(reason = "does evil stuff with paths and urls because devs and jenkins do evil stuff with paths and urls") - static URL[] parseClassPathWithSymlinks() throws Exception { - URL raw[] = JarHell.parseClassPath(); - for (int i = 0; i < raw.length; i++) { - raw[i] = PathUtils.get(raw[i].toURI()).toRealPath().toUri().toURL(); + static Set<URL> parseClassPathWithSymlinks() throws Exception { + Set<URL> raw = JarHell.parseClassPath(); + Set<URL> cooked = new HashSet<>(raw.size()); + for (URL url : raw) { + boolean added = cooked.add(PathUtils.get(url.toURI()).toRealPath().toUri().toURL()); + if (added == false) { + throw new IllegalStateException("Duplicate in classpath after resolving symlinks: " + url); + } } return raw; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 1ea96e6f548..fa659e06fb2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -1093,9 +1093,15 @@ public abstract class ESTestCase extends LuceneTestCase { } protected static long spinForAtLeastOneMillisecond() { - long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS); - // force at least one millisecond to elapse, but ensure the - // clock has enough resolution to observe the passage of time + return spinForAtLeastNMilliseconds(1); + } + + protected static long spinForAtLeastNMilliseconds(final long ms) { + long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(ms, TimeUnit.MILLISECONDS); + /* + * Force at least ms milliseconds to elapse, but ensure the clock has enough resolution to + * observe the passage of time. 
+ */ long start = System.nanoTime(); long elapsed; while ((elapsed = (System.nanoTime() - start)) < nanosecondsInMillisecond) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java index b07e7315b88..bccbd537a53 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java @@ -25,8 +25,8 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskAwareRequest; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.transport.TransportRequest; import java.util.Collection; import java.util.concurrent.CopyOnWriteArrayList; @@ -46,7 +46,7 @@ public class MockTaskManager extends TaskManager { } @Override - public Task register(String type, String action, TransportRequest request) { + public Task register(String type, String action, TaskAwareRequest request) { Task task = super.register(type, action, request); if (task != null) { for (MockTaskManagerListener listener : listeners) {
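The new `spinForAtLeastNMilliseconds` helper generalizes the old one-millisecond spin: it busy-waits against the monotonic `System.nanoTime()` clock rather than sleeping, so it still measures correctly when the clock ticks coarsely. A self-contained sketch of the same technique (class and method names here are illustrative, not part of the patch):

[source,java]
--------------------------------------------------
import java.util.concurrent.TimeUnit;

public class SpinWaitSketch {
    /** Busy-wait until at least {@code ms} milliseconds have observably elapsed. */
    static long spinForAtLeastNMilliseconds(final long ms) {
        final long targetNanos = TimeUnit.MILLISECONDS.toNanos(ms);
        final long start = System.nanoTime();
        long elapsed;
        // System.nanoTime() is monotonic, so the loop terminates once the
        // clock has observably advanced by the requested amount.
        while ((elapsed = System.nanoTime() - start) < targetNanos) {
            // spin
        }
        return elapsed;
    }

    public static void main(String[] args) {
        System.out.println("spun for " + spinForAtLeastNMilliseconds(2) + " nanos");
    }
}
--------------------------------------------------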