diff --git a/.gitignore b/.gitignore index d1810a5a83f..b4ec8795057 100644 --- a/.gitignore +++ b/.gitignore @@ -33,6 +33,7 @@ dependency-reduced-pom.xml # testing stuff **/.local* .vagrant/ +/logs/ # osx stuff .DS_Store diff --git a/README.textile b/README.textile index 52c7607509c..dc3a263cd7c 100644 --- a/README.textile +++ b/README.textile @@ -123,7 +123,7 @@ There are many more options to perform search, after all, it's a search product h3. Multi Tenant - Indices and Types -Maan, that twitter index might get big (in this case, index size == valuation). Let's see if we can structure our twitter system a bit differently in order to support such large amounts of data. +Man, that twitter index might get big (in this case, index size == valuation). Let's see if we can structure our twitter system a bit differently in order to support such large amounts of data. Elasticsearch supports multiple indices, as well as multiple types per index. In the previous example we used an index called @twitter@, with two types, @user@ and @tweet@. diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 6b14e7f4f7f..ef2372c003d 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -325,7 +325,7 @@ vagrant plugin install vagrant-cachier . Validate your installed dependencies: ------------------------------------- -gradle :qa:vagrant:checkVagrantVersion +gradle :qa:vagrant:vagrantCheckVersion ------------------------------------- . 
Download and smoke test the VMs with `gradle vagrantSmokeTest` or @@ -417,17 +417,26 @@ and in another window: ---------------------------------------------------- vagrant up centos-7 --provider virtualbox && vagrant ssh centos-7 -cd $TESTROOT -sudo bats $BATS/*rpm*.bats +cd $BATS_ARCHIVES +sudo -E bats $BATS_TESTS/*rpm*.bats ---------------------------------------------------- If you wanted to retest all the release artifacts on a single VM you could: ------------------------------------------------- -gradle prepareTestRoot +gradle vagrantSetUp vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404 -cd $BATS_ARCHIVES -sudo bats $BATS/*.bats +cd $BATS_ARCHIVES +sudo -E bats $BATS_TESTS/*.bats +------------------------------------------------- + +Note: Starting a vagrant VM outside of the elasticsearch folder requires you to +indicate the folder that contains the Vagrantfile using the VAGRANT_CWD +environment variable: + +------------------------------------------------- +gradle vagrantSetUp +VAGRANT_CWD=/path/to/elasticsearch vagrant up centos-7 --provider virtualbox ------------------------------------------------- == Coverage analysis
config.vm.synced_folder ".", "/vagrant", disabled: true config.vm.synced_folder ".", "/elasticsearch" + # Expose project directory + PROJECT_DIR = ENV['VAGRANT_PROJECT_DIR'] || Dir.pwd + config.vm.synced_folder PROJECT_DIR, "/project" config.vm.provider "virtualbox" do |v| # Give the boxes 3GB because Elasticsearch defaults to using 2GB v.memory = 3072 @@ -272,8 +275,10 @@ export ZIP=/elasticsearch/distribution/zip/build/distributions export TAR=/elasticsearch/distribution/tar/build/distributions export RPM=/elasticsearch/distribution/rpm/build/distributions export DEB=/elasticsearch/distribution/deb/build/distributions -export TESTROOT=/elasticsearch/qa/vagrant/build/testroot -export BATS=/elasticsearch/qa/vagrant/src/test/resources/packaging/scripts +export BATS=/project/build/bats +export BATS_UTILS=/project/build/bats/utils +export BATS_TESTS=/project/build/bats/tests +export BATS_ARCHIVES=/project/build/bats/archives VARS SHELL end diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 65402290e01..628e59de1a6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -495,6 +495,8 @@ class BuildPlugin implements Plugin { systemProperty 'tests.artifact', project.name systemProperty 'tests.task', path systemProperty 'tests.security.manager', 'true' + // Breaking change in JDK-9, revert to JDK-8 behavior for now, see https://github.com/elastic/elasticsearch/issues/21534 + systemProperty 'jdk.io.permissionsUseCanonicalPath', 'true' systemProperty 'jna.nosys', 'true' // default test sysprop values systemProperty 'tests.ifNoTests', 'fail' diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index 076a564f84a..018f9fde2f2 100644 
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -143,6 +143,10 @@ public class ThirdPartyAuditTask extends AntTask { if (m.matches()) { missingClasses.add(m.group(1).replace('.', '/') + ".class"); } + + // Reset the priority of the event to DEBUG, so it doesn't + // pollute the build output + event.setMessage(event.getMessage(), Project.MSG_DEBUG); } else if (event.getPriority() == Project.MSG_ERR) { Matcher m = VIOLATION_PATTERN.matcher(event.getMessage()); if (m.matches()) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 36828f1cb86..ca4957f7a6c 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -104,11 +104,13 @@ class ClusterConfiguration { @Input Closure waitCondition = { NodeInfo node, AntBuilder ant -> File tmpFile = new File(node.cwd, 'wait.success') - ant.echo("==> [${new Date()}] checking health: http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow") + String waitUrl = "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow" + ant.echo(message: "==> [${new Date()}] checking health: ${waitUrl}", + level: 'info') // checking here for wait_for_nodes to be >= the number of nodes because its possible // this cluster is attempting to connect to nodes created by another task (same cluster name), // so there will be more nodes in that case in the cluster state - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", + ant.get(src: waitUrl, dest: tmpFile.toString(), ignoreerrors: true, // do not fail on error, so logging 
buffers can be flushed by the wait task retries: 10) @@ -121,7 +123,7 @@ class ClusterConfiguration { Map systemProperties = new HashMap<>() - Map settings = new HashMap<>() + Map settings = new HashMap<>() // map from destination path, to source file Map extraConfigFiles = new HashMap<>() @@ -138,7 +140,7 @@ class ClusterConfiguration { } @Input - void setting(String name, String value) { + void setting(String name, Object value) { settings.put(name, value) } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy index c68e0528c9b..65b90c4d9a0 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy @@ -18,14 +18,7 @@ */ package org.elasticsearch.gradle.vagrant -import org.gradle.api.DefaultTask import org.gradle.api.tasks.Input -import org.gradle.api.tasks.TaskAction -import org.gradle.logging.ProgressLoggerFactory -import org.gradle.process.internal.ExecAction -import org.gradle.process.internal.ExecActionFactory - -import javax.inject.Inject /** * Runs bats over vagrant. 
Pretty much like running it using Exec but with a diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy index d79c2533fab..ecba08d7d4c 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy @@ -34,11 +34,18 @@ public class VagrantCommandTask extends LoggedExec { @Input String boxName + @Input + Map environmentVars + public VagrantCommandTask() { executable = 'vagrant' + project.afterEvaluate { // It'd be nice if --machine-readable were, well, nice standardOutput = new TeeOutputStream(standardOutput, createLoggerOutputStream()) + if (environmentVars != null) { + environment environmentVars + } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy new file mode 100644 index 00000000000..f16913d5be6 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.vagrant + +import org.gradle.api.tasks.Input + +class VagrantPropertiesExtension { + + @Input + List boxes + + @Input + Long testSeed + + @Input + String formattedTestSeed + + @Input + String upgradeFromVersion + + @Input + List upgradeFromVersions + + @Input + String batsDir + + @Input + Boolean inheritTests + + @Input + Boolean inheritTestArchives + + @Input + Boolean inheritTestUtils + + VagrantPropertiesExtension(List availableBoxes) { + this.boxes = availableBoxes + this.batsDir = 'src/test/resources/packaging' + } + + void boxes(String... boxes) { + this.boxes = Arrays.asList(boxes) + } + + void setBatsDir(String batsDir) { + this.batsDir = batsDir + } + + void setInheritTests(Boolean inheritTests) { + this.inheritTests = inheritTests + } + + void setInheritTestArchives(Boolean inheritTestArchives) { + this.inheritTestArchives = inheritTestArchives + } + + void setInheritTestUtils(Boolean inheritTestUtils) { + this.inheritTestUtils = inheritTestUtils + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy new file mode 100644 index 00000000000..0c16a8972c8 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -0,0 +1,457 @@ +package org.elasticsearch.gradle.vagrant + +import org.elasticsearch.gradle.FileContentsTask +import org.gradle.BuildAdapter +import org.gradle.BuildResult +import org.gradle.api.* +import org.gradle.api.artifacts.dsl.RepositoryHandler +import org.gradle.api.internal.artifacts.dependencies.DefaultProjectDependency +import org.gradle.api.tasks.Copy +import org.gradle.api.tasks.Delete +import org.gradle.api.tasks.Exec + +class VagrantTestPlugin implements Plugin { + + /** All available boxes **/ + static List BOXES = 
[ + 'centos-6', + 'centos-7', + 'debian-8', + 'fedora-24', + 'oel-6', + 'oel-7', + 'opensuse-13', + 'sles-12', + 'ubuntu-1204', + 'ubuntu-1404', + 'ubuntu-1604' + ] + + /** Boxes used when sampling the tests **/ + static List SAMPLE = [ + 'centos-7', + 'ubuntu-1404', + ] + + /** All onboarded archives by default, available for Bats tests even if not used **/ + static List DISTRIBUTION_ARCHIVES = ['tar', 'rpm', 'deb'] + + /** Packages onboarded for upgrade tests **/ + static List UPGRADE_FROM_ARCHIVES = ['rpm', 'deb'] + + private static final BATS = 'bats' + private static final String BATS_TEST_COMMAND ="cd \$BATS_ARCHIVES && sudo -E bats --tap \$BATS_TESTS/*.$BATS" + + @Override + void apply(Project project) { + + // Creates the Vagrant extension for the project + project.extensions.create('esvagrant', VagrantPropertiesExtension, listVagrantBoxes(project)) + + // Add required repositories for Bats tests + configureBatsRepositories(project) + + // Creates custom configurations for Bats testing files (and associated scripts and archives) + createBatsConfiguration(project) + + // Creates all the main Vagrant tasks + createVagrantTasks(project) + + if (project.extensions.esvagrant.boxes == null || project.extensions.esvagrant.boxes.size() == 0) { + throw new InvalidUserDataException('Vagrant boxes cannot be null or empty for esvagrant') + } + + for (String box : project.extensions.esvagrant.boxes) { + if (BOXES.contains(box) == false) { + throw new InvalidUserDataException("Vagrant box [${box}] not found, available virtual machines are ${BOXES}") + } + } + + // Creates all tasks related to the Vagrant boxes + createVagrantBoxesTasks(project) + } + + private List listVagrantBoxes(Project project) { + String vagrantBoxes = project.getProperties().get('vagrant.boxes', 'sample') + if (vagrantBoxes == 'sample') { + return SAMPLE + } else if (vagrantBoxes == 'all') { + return BOXES + } else { + return vagrantBoxes.split(',') + } + } + + private static Set 
listVersions(Project project) { + Node xml + new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s -> + xml = new XmlParser().parse(s) + } + Set versions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /[5]\.\d\.\d/ }) + if (versions.isEmpty() == false) { + return versions; + } + + // If no version is found, we run the tests with the current version + return Collections.singleton(project.version); + } + + private static File getVersionsFile(Project project) { + File versions = new File(project.projectDir, 'versions'); + if (versions.exists() == false) { + // Use the elasticsearch's versions file from project :qa:vagrant + versions = project.project(":qa:vagrant").file('versions') + } + return versions + } + + private static void configureBatsRepositories(Project project) { + RepositoryHandler repos = project.repositories + + // Try maven central first, it'll have releases before 5.0.0 + repos.mavenCentral() + + /* Setup a repository that tries to download from + https://artifacts.elastic.co/downloads/elasticsearch/[module]-[revision].[ext] + which should work for 5.0.0+. 
This isn't a real ivy repository but gradle + is fine with that */ + repos.ivy { + artifactPattern "https://artifacts.elastic.co/downloads/elasticsearch/[module]-[revision].[ext]" + } + } + + private static void createBatsConfiguration(Project project) { + project.configurations.create(BATS) + + Long seed + String formattedSeed = null + String[] upgradeFromVersions + + String maybeTestsSeed = System.getProperty("tests.seed", null); + if (maybeTestsSeed != null) { + List seeds = maybeTestsSeed.tokenize(':') + if (seeds.size() != 0) { + String masterSeed = seeds.get(0) + seed = new BigInteger(masterSeed, 16).longValue() + formattedSeed = maybeTestsSeed + } + } + if (formattedSeed == null) { + seed = new Random().nextLong() + formattedSeed = String.format("%016X", seed) + } + + String maybeUpdradeFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null) + if (maybeUpdradeFromVersions != null) { + upgradeFromVersions = maybeUpdradeFromVersions.split(",") + } else { + upgradeFromVersions = getVersionsFile(project) + } + + String upgradeFromVersion = upgradeFromVersions[new Random(seed).nextInt(upgradeFromVersions.length)] + + DISTRIBUTION_ARCHIVES.each { + // Adds a dependency for the current version + project.dependencies.add(BATS, project.dependencies.project(path: ":distribution:${it}", configuration: 'archives')) + } + + UPGRADE_FROM_ARCHIVES.each { + // The version of elasticsearch that we upgrade *from* + project.dependencies.add(BATS, "org.elasticsearch.distribution.${it}:elasticsearch:${upgradeFromVersion}@${it}") + } + + project.extensions.esvagrant.testSeed = seed + project.extensions.esvagrant.formattedTestSeed = formattedSeed + project.extensions.esvagrant.upgradeFromVersion = upgradeFromVersion + project.extensions.esvagrant.upgradeFromVersions = upgradeFromVersions + } + + private static void createCleanTask(Project project) { + project.tasks.create('clean', Delete.class) { + description 'Clean the project build directory' + group 
'Build' + delete project.buildDir + } + } + + private static void createStopTask(Project project) { + project.tasks.create('stop') { + description 'Stop any tasks from tests that still may be running' + group 'Verification' + } + } + + private static void createSmokeTestTask(Project project) { + project.tasks.create('vagrantSmokeTest') { + description 'Smoke test the specified vagrant boxes' + group 'Verification' + } + } + + private static void createPrepareVagrantTestEnvTask(Project project) { + File batsDir = new File("${project.buildDir}/${BATS}") + + Task createBatsDirsTask = project.tasks.create('createBatsDirs') + createBatsDirsTask.outputs.dir batsDir + createBatsDirsTask.dependsOn project.tasks.vagrantVerifyVersions + createBatsDirsTask.doLast { + batsDir.mkdirs() + } + + Copy copyBatsArchives = project.tasks.create('copyBatsArchives', Copy) { + dependsOn createBatsDirsTask + into "${batsDir}/archives" + from project.configurations[BATS] + } + + Copy copyBatsTests = project.tasks.create('copyBatsTests', Copy) { + dependsOn createBatsDirsTask + into "${batsDir}/tests" + from { + "${project.extensions.esvagrant.batsDir}/tests" + } + } + + Copy copyBatsUtils = project.tasks.create('copyBatsUtils', Copy) { + dependsOn createBatsDirsTask + into "${batsDir}/utils" + from { + "${project.extensions.esvagrant.batsDir}/utils" + } + } + + // Now we iterate over dependencies of the bats configuration. When a project dependency is found, + // we bring back its own archives, test files or test utils. 
+ project.afterEvaluate { + project.configurations.bats.dependencies.findAll {it.configuration == BATS }.each { d -> + if (d instanceof DefaultProjectDependency) { + DefaultProjectDependency externalBatsDependency = (DefaultProjectDependency) d + Project externalBatsProject = externalBatsDependency.dependencyProject + String externalBatsDir = externalBatsProject.extensions.esvagrant.batsDir + + if (project.extensions.esvagrant.inheritTests) { + copyBatsTests.from(externalBatsProject.files("${externalBatsDir}/tests")) + } + if (project.extensions.esvagrant.inheritTestArchives) { + copyBatsArchives.from(externalBatsDependency.projectConfiguration.files) + } + if (project.extensions.esvagrant.inheritTestUtils) { + copyBatsUtils.from(externalBatsProject.files("${externalBatsDir}/utils")) + } + } + } + } + + Task createVersionFile = project.tasks.create('createVersionFile', FileContentsTask) { + dependsOn createBatsDirsTask + file "${batsDir}/archives/version" + contents project.version + } + + Task createUpgradeFromFile = project.tasks.create('createUpgradeFromFile', FileContentsTask) { + dependsOn createBatsDirsTask + file "${batsDir}/archives/upgrade_from_version" + contents project.extensions.esvagrant.upgradeFromVersion + } + + Task vagrantSetUpTask = project.tasks.create('vagrantSetUp') + vagrantSetUpTask.dependsOn 'vagrantCheckVersion' + vagrantSetUpTask.dependsOn copyBatsTests, copyBatsUtils, copyBatsArchives, createVersionFile, createUpgradeFromFile + vagrantSetUpTask.doFirst { + project.gradle.addBuildListener new BuildAdapter() { + @Override + void buildFinished(BuildResult result) { + if (result.failure) { + println "Reproduce with: gradle packagingTest " + +"-Pvagrant.boxes=${project.extensions.esvagrant.boxes} " + + "-Dtests.seed=${project.extensions.esvagrant.formattedSeed} " + + "-Dtests.packaging.upgrade.from.versions=${project.extensions.esvagrant.upgradeFromVersions.join(",")}" + } + } + } + } + } + + private static void 
createUpdateVersionsTask(Project project) { + project.tasks.create('vagrantUpdateVersions') { + description 'Update file containing options for the\n "starting" version in the "upgrade from" packaging tests.' + group 'Verification' + doLast { + File versions = getVersionsFile(project) + versions.text = listVersions(project).join('\n') + '\n' + } + } + } + + private static void createVerifyVersionsTask(Project project) { + project.tasks.create('vagrantVerifyVersions') { + description 'Update file containing options for the\n "starting" version in the "upgrade from" packaging tests.' + group 'Verification' + doLast { + String maybeUpdateFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null) + if (maybeUpdateFromVersions == null) { + Set versions = listVersions(project) + Set actualVersions = new TreeSet<>(project.extensions.esvagrant.upgradeFromVersions) + if (!versions.equals(actualVersions)) { + throw new GradleException("out-of-date versions " + actualVersions + + ", expected " + versions + "; run gradle vagrantUpdateVersions") + } + } + } + } + } + + private static void createCheckVagrantVersionTask(Project project) { + project.tasks.create('vagrantCheckVersion', Exec) { + description 'Check the Vagrant version' + group 'Verification' + commandLine 'vagrant', '--version' + standardOutput = new ByteArrayOutputStream() + doLast { + String version = standardOutput.toString().trim() + if ((version ==~ /Vagrant 1\.(8\.[6-9]|9\.[0-9])+/) == false) { + throw new InvalidUserDataException("Illegal version of vagrant [${version}]. 
Need [Vagrant 1.8.6+]") + } + } + } + } + + private static void createCheckVirtualBoxVersionTask(Project project) { + project.tasks.create('virtualboxCheckVersion', Exec) { + description 'Check the Virtualbox version' + group 'Verification' + commandLine 'vboxmanage', '--version' + standardOutput = new ByteArrayOutputStream() + doLast { + String version = standardOutput.toString().trim() + try { + String[] versions = version.split('\\.') + int major = Integer.parseInt(versions[0]) + int minor = Integer.parseInt(versions[1]) + if ((major < 5) || (major == 5 && minor < 1)) { + throw new InvalidUserDataException("Illegal version of virtualbox [${version}]. Need [5.1+]") + } + } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) { + throw new InvalidUserDataException("Unable to parse version of virtualbox [${version}]. Required [5.1+]", e) + } + } + } + } + + private static void createPackagingTestTask(Project project) { + project.tasks.create('packagingTest') { + group 'Verification' + description "Tests yum/apt packages using vagrant and bats.\n" + + " Specify the vagrant boxes to test using the gradle property 'vagrant.boxes'.\n" + + " 'sample' can be used to test a single yum and apt box. 'all' can be used to\n" + + " test all available boxes. 
The available boxes are: \n" + + " ${BOXES}" + dependsOn 'vagrantCheckVersion' + } + } + + private static void createVagrantTasks(Project project) { + createCleanTask(project) + createStopTask(project) + createSmokeTestTask(project) + createUpdateVersionsTask(project) + createVerifyVersionsTask(project) + createCheckVagrantVersionTask(project) + createCheckVirtualBoxVersionTask(project) + createPrepareVagrantTestEnvTask(project) + createPackagingTestTask(project) + } + + private static void createVagrantBoxesTasks(Project project) { + assert project.extensions.esvagrant.boxes != null + + assert project.tasks.stop != null + Task stop = project.tasks.stop + + assert project.tasks.vagrantSmokeTest != null + Task vagrantSmokeTest = project.tasks.vagrantSmokeTest + + assert project.tasks.vagrantCheckVersion != null + Task vagrantCheckVersion = project.tasks.vagrantCheckVersion + + assert project.tasks.virtualboxCheckVersion != null + Task virtualboxCheckVersion = project.tasks.virtualboxCheckVersion + + assert project.tasks.vagrantSetUp != null + Task vagrantSetUp = project.tasks.vagrantSetUp + + assert project.tasks.packagingTest != null + Task packagingTest = project.tasks.packagingTest + + /* + * We always use the main project.rootDir as Vagrant's current working directory (VAGRANT_CWD) + * so that boxes are not duplicated for every Gradle project that use this VagrantTestPlugin. 
+ */ + def vagrantEnvVars = [ + 'VAGRANT_CWD' : "${project.rootDir.absolutePath}", + 'VAGRANT_VAGRANTFILE' : 'Vagrantfile', + 'VAGRANT_PROJECT_DIR' : "${project.projectDir.absolutePath}" + ] + + // Each box gets it own set of tasks + for (String box : BOXES) { + String boxTask = box.capitalize().replace('-', '') + + // always add a halt task for all boxes, so clean makes sure they are all shutdown + Task halt = project.tasks.create("vagrant${boxTask}#halt", VagrantCommandTask) { + boxName box + environmentVars vagrantEnvVars + args 'halt', box + } + stop.dependsOn(halt) + if (project.extensions.esvagrant.boxes.contains(box) == false) { + // we only need a halt task if this box was not specified + continue; + } + + Task update = project.tasks.create("vagrant${boxTask}#update", VagrantCommandTask) { + boxName box + environmentVars vagrantEnvVars + args 'box', 'update', box + dependsOn vagrantCheckVersion, virtualboxCheckVersion, vagrantSetUp + } + + Task up = project.tasks.create("vagrant${boxTask}#up", VagrantCommandTask) { + boxName box + environmentVars vagrantEnvVars + /* Its important that we try to reprovision the box even if it already + exists. That way updates to the vagrant configuration take automatically. + That isn't to say that the updates will always be compatible. Its ok to + just destroy the boxes if they get busted but that is a manual step + because its slow-ish. */ + /* We lock the provider to virtualbox because the Vagrantfile specifies + lots of boxes that only work properly in virtualbox. Virtualbox is + vagrant's default but its possible to change that default and folks do. + But the boxes that we use are unlikely to work properly with other + virtualization providers. Thus the lock. */ + args 'up', box, '--provision', '--provider', 'virtualbox' + /* It'd be possible to check if the box is already up here and output + SKIPPED but that would require running vagrant status which is slow! 
*/ + dependsOn update + } + + Task smoke = project.tasks.create("vagrant${boxTask}#smoketest", Exec) { + environment vagrantEnvVars + dependsOn up + finalizedBy halt + commandLine 'vagrant', 'ssh', box, '--command', + "set -o pipefail && echo 'Hello from ${project.path}' | sed -ue 's/^/ ${box}: /'" + } + vagrantSmokeTest.dependsOn(smoke) + + Task packaging = project.tasks.create("vagrant${boxTask}#packagingtest", BatsOverVagrantTask) { + boxName box + environmentVars vagrantEnvVars + dependsOn up + finalizedBy halt + command BATS_TEST_COMMAND + } + packagingTest.dependsOn(packaging) + } + } +} diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrant.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrant.properties new file mode 100644 index 00000000000..844310fa9d7 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrant.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.vagrant.VagrantTestPlugin diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 9c1610741d9..62e9eec224d 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -840,7 +840,6 @@ - diff --git a/buildSrc/version.properties b/buildSrc/version.properties index bbf4170591d..7bede9f390e 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,11 +1,12 @@ elasticsearch = 6.0.0-alpha1 -lucene = 6.3.0-snapshot-a66a445 +lucene = 6.3.0 # optional dependencies spatial4j = 0.6 jts = 1.13 jackson = 2.8.1 snakeyaml = 1.15 +# When updating log4j, please update also docs/java-api/index.asciidoc log4j = 2.7 slf4j = 1.6.2 jna = 4.2.2 diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java 
b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java index 343d3cf613a..ac45f20dc25 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java @@ -35,7 +35,7 @@ import java.util.List; public class NoopPlugin extends Plugin implements ActionPlugin { @Override - public List, ? extends ActionResponse>> getActions() { + public List> getActions() { return Arrays.asList( new ActionHandler<>(NoopBulkAction.INSTANCE, TransportNoopBulkAction.class), new ActionHandler<>(NoopSearchAction.INSTANCE, TransportNoopSearchAction.class) diff --git a/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java b/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java index 56b89db1694..84753e6f75c 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java +++ b/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java @@ -38,25 +38,15 @@ import java.io.IOException; /** * Default implementation of {@link org.apache.http.nio.protocol.HttpAsyncResponseConsumer}. Buffers the whole * response content in heap memory, meaning that the size of the buffer is equal to the content-length of the response. - * Limits the size of responses that can be read to {@link #DEFAULT_BUFFER_LIMIT} by default, configurable value. - * Throws an exception in case the entity is longer than the configured buffer limit. + * Limits the size of responses that can be read based on a configurable argument. Throws an exception in case the entity is longer + * than the configured buffer limit. 
*/ public class HeapBufferedAsyncResponseConsumer extends AbstractAsyncResponseConsumer { - //default buffer limit is 10MB - public static final int DEFAULT_BUFFER_LIMIT = 10 * 1024 * 1024; - private final int bufferLimitBytes; private volatile HttpResponse response; private volatile SimpleInputBuffer buf; - /** - * Creates a new instance of this consumer with a buffer limit of {@link #DEFAULT_BUFFER_LIMIT} - */ - public HeapBufferedAsyncResponseConsumer() { - this.bufferLimitBytes = DEFAULT_BUFFER_LIMIT; - } - /** * Creates a new instance of this consumer with the provided buffer limit */ diff --git a/client/rest/src/main/java/org/elasticsearch/client/HttpAsyncResponseConsumerFactory.java b/client/rest/src/main/java/org/elasticsearch/client/HttpAsyncResponseConsumerFactory.java new file mode 100644 index 00000000000..a5e5b39bed5 --- /dev/null +++ b/client/rest/src/main/java/org/elasticsearch/client/HttpAsyncResponseConsumerFactory.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.HttpResponse; +import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; + +import static org.elasticsearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory.DEFAULT_BUFFER_LIMIT; + +/** + * Factory used to create instances of {@link HttpAsyncResponseConsumer}. Each request retry needs its own instance of the + * consumer object. Users can implement this interface and pass their own instance to the specialized + * performRequest methods that accept an {@link HttpAsyncResponseConsumerFactory} instance as argument. + */ +interface HttpAsyncResponseConsumerFactory { + + /** + * Creates the default type of {@link HttpAsyncResponseConsumer}, based on heap buffering with a buffer limit of 100MB. + */ + HttpAsyncResponseConsumerFactory DEFAULT = new HeapBufferedResponseConsumerFactory(DEFAULT_BUFFER_LIMIT); + + /** + * Creates the {@link HttpAsyncResponseConsumer}, called once per request attempt. + */ + HttpAsyncResponseConsumer<HttpResponse> createHttpAsyncResponseConsumer(); + + /** + * Default factory used to create instances of {@link HttpAsyncResponseConsumer}. + * Creates one instance of {@link HeapBufferedAsyncResponseConsumer} for each request attempt, with a configurable + * buffer limit which defaults to 100MB. 
+ */ + class HeapBufferedResponseConsumerFactory implements HttpAsyncResponseConsumerFactory { + + //default buffer limit is 100MB + static final int DEFAULT_BUFFER_LIMIT = 100 * 1024 * 1024; + + private final int bufferLimit; + + public HeapBufferedResponseConsumerFactory(int bufferLimitBytes) { + this.bufferLimit = bufferLimitBytes; + } + + @Override + public HttpAsyncResponseConsumer<HttpResponse> createHttpAsyncResponseConsumer() { + return new HeapBufferedAsyncResponseConsumer(bufferLimit); + } + } +} diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index b8eb98b4aee..89c3309dbbd 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -143,7 +143,7 @@ public class RestClient implements Closeable { * @throws ResponseException in case Elasticsearch responded with a status code that indicated an error */ public Response performRequest(String method, String endpoint, Header... headers) throws IOException { - return performRequest(method, endpoint, Collections.<String, String>emptyMap(), (HttpEntity)null, headers); + return performRequest(method, endpoint, Collections.<String, String>emptyMap(), null, headers); } /** @@ -165,9 +165,9 @@ public class RestClient implements Closeable { /** * Sends a request to the Elasticsearch cluster that the client points to and waits for the corresponding response - * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, Header...)} - * which doesn't require specifying an {@link HttpAsyncResponseConsumer} instance, {@link HeapBufferedAsyncResponseConsumer} - * will be used to consume the response body. + * to be returned. 
Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumerFactory, Header...)} + * which doesn't require specifying an {@link HttpAsyncResponseConsumerFactory} instance, + * {@link HttpAsyncResponseConsumerFactory} will be used to create the needed instances of {@link HttpAsyncResponseConsumer}. * * @param method the http method * @param endpoint the path of the request (without host and port) @@ -181,8 +181,7 @@ public class RestClient implements Closeable { */ public Response performRequest(String method, String endpoint, Map params, HttpEntity entity, Header... headers) throws IOException { - HttpAsyncResponseConsumer responseConsumer = new HeapBufferedAsyncResponseConsumer(); - return performRequest(method, endpoint, params, entity, responseConsumer, headers); + return performRequest(method, endpoint, params, entity, HttpAsyncResponseConsumerFactory.DEFAULT, headers); } /** @@ -196,8 +195,9 @@ public class RestClient implements Closeable { * @param endpoint the path of the request (without host and port) * @param params the query_string parameters * @param entity the body of the request, null if not applicable - * @param responseConsumer the {@link HttpAsyncResponseConsumer} callback. Controls how the response - * body gets streamed from a non-blocking HTTP connection on the client side. + * @param httpAsyncResponseConsumerFactory the {@link HttpAsyncResponseConsumerFactory} used to create one + * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the response body gets streamed from a non-blocking HTTP + * connection on the client side. 
* @param headers the optional request headers * @return the response returned by Elasticsearch * @throws IOException in case of a problem or the connection was aborted @@ -205,10 +205,10 @@ public class RestClient implements Closeable { * @throws ResponseException in case Elasticsearch responded with a status code that indicated an error */ public Response performRequest(String method, String endpoint, Map params, - HttpEntity entity, HttpAsyncResponseConsumer responseConsumer, + HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory, Header... headers) throws IOException { SyncResponseListener listener = new SyncResponseListener(maxRetryTimeoutMillis); - performRequestAsync(method, endpoint, params, entity, responseConsumer, listener, headers); + performRequestAsync(method, endpoint, params, entity, httpAsyncResponseConsumerFactory, listener, headers); return listener.get(); } @@ -245,9 +245,9 @@ public class RestClient implements Closeable { /** * Sends a request to the Elasticsearch cluster that the client points to. Doesn't wait for the response, instead * the provided {@link ResponseListener} will be notified upon completion or failure. - * Shortcut to {@link #performRequestAsync(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} - * which doesn't require specifying an {@link HttpAsyncResponseConsumer} instance, {@link HeapBufferedAsyncResponseConsumer} - * will be used to consume the response body. + * Shortcut to {@link #performRequestAsync(String, String, Map, HttpEntity, HttpAsyncResponseConsumerFactory, ResponseListener, + * Header...)} which doesn't require specifying an {@link HttpAsyncResponseConsumerFactory} instance, + * {@link HttpAsyncResponseConsumerFactory} will be used to create the needed instances of {@link HttpAsyncResponseConsumer}. 
* * @param method the http method * @param endpoint the path of the request (without host and port) @@ -258,8 +258,7 @@ public class RestClient implements Closeable { */ public void performRequestAsync(String method, String endpoint, Map params, HttpEntity entity, ResponseListener responseListener, Header... headers) { - HttpAsyncResponseConsumer responseConsumer = new HeapBufferedAsyncResponseConsumer(); - performRequestAsync(method, endpoint, params, entity, responseConsumer, responseListener, headers); + performRequestAsync(method, endpoint, params, entity, HttpAsyncResponseConsumerFactory.DEFAULT, responseListener, headers); } /** @@ -274,29 +273,31 @@ public class RestClient implements Closeable { * @param endpoint the path of the request (without host and port) * @param params the query_string parameters * @param entity the body of the request, null if not applicable - * @param responseConsumer the {@link HttpAsyncResponseConsumer} callback. Controls how the response - * body gets streamed from a non-blocking HTTP connection on the client side. + * @param httpAsyncResponseConsumerFactory the {@link HttpAsyncResponseConsumerFactory} used to create one + * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the response body gets streamed from a non-blocking HTTP + * connection on the client side. * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails * @param headers the optional request headers */ public void performRequestAsync(String method, String endpoint, Map params, - HttpEntity entity, HttpAsyncResponseConsumer responseConsumer, + HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory, ResponseListener responseListener, Header... 
 headers) { URI uri = buildUri(pathPrefix, endpoint, params); HttpRequestBase request = createHttpRequest(method, uri, entity); setHeaders(request, headers); FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(responseListener); long startTime = System.nanoTime(); - performRequestAsync(startTime, nextHost().iterator(), request, responseConsumer, failureTrackingResponseListener); + performRequestAsync(startTime, nextHost().iterator(), request, httpAsyncResponseConsumerFactory, failureTrackingResponseListener); } private void performRequestAsync(final long startTime, final Iterator<HttpHost> hosts, final HttpRequestBase request, - final HttpAsyncResponseConsumer<HttpResponse> responseConsumer, + final HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory, final FailureTrackingResponseListener listener) { final HttpHost host = hosts.next(); //we stream the request body if the entity allows for it HttpAsyncRequestProducer requestProducer = HttpAsyncMethods.create(host, request); - client.execute(requestProducer, responseConsumer, new FutureCallback<HttpResponse>() { + HttpAsyncResponseConsumer<HttpResponse> asyncResponseConsumer = httpAsyncResponseConsumerFactory.createHttpAsyncResponseConsumer(); + client.execute(requestProducer, asyncResponseConsumer, new FutureCallback<HttpResponse>() { @Override public void completed(HttpResponse httpResponse) { try { @@ -346,7 +347,7 @@ public class RestClient implements Closeable { } else { listener.trackFailure(exception); request.reset(); - performRequestAsync(startTime, hosts, request, responseConsumer, listener); + performRequestAsync(startTime, hosts, request, httpAsyncResponseConsumerFactory, listener); } } else { listener.onDefinitiveFailure(exception); diff --git a/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java b/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java index d30a9e00b53..2488ea4b435 100644 --- 
a/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java @@ -32,7 +32,6 @@ import org.apache.http.nio.ContentDecoder; import org.apache.http.nio.IOControl; import org.apache.http.protocol.HttpContext; -import static org.elasticsearch.client.HeapBufferedAsyncResponseConsumer.DEFAULT_BUFFER_LIMIT; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; @@ -45,13 +44,14 @@ public class HeapBufferedAsyncResponseConsumerTests extends RestClientTestCase { //maximum buffer that this test ends up allocating is 50MB private static final int MAX_TEST_BUFFER_SIZE = 50 * 1024 * 1024; + private static final int TEST_BUFFER_LIMIT = 10 * 1024 * 1024; public void testResponseProcessing() throws Exception { ContentDecoder contentDecoder = mock(ContentDecoder.class); IOControl ioControl = mock(IOControl.class); HttpContext httpContext = mock(HttpContext.class); - HeapBufferedAsyncResponseConsumer consumer = spy(new HeapBufferedAsyncResponseConsumer()); + HeapBufferedAsyncResponseConsumer consumer = spy(new HeapBufferedAsyncResponseConsumer(TEST_BUFFER_LIMIT)); ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, "OK"); @@ -74,8 +74,8 @@ public class HeapBufferedAsyncResponseConsumerTests extends RestClientTestCase { } public void testDefaultBufferLimit() throws Exception { - HeapBufferedAsyncResponseConsumer consumer = new HeapBufferedAsyncResponseConsumer(); - bufferLimitTest(consumer, DEFAULT_BUFFER_LIMIT); + HeapBufferedAsyncResponseConsumer consumer = new HeapBufferedAsyncResponseConsumer(TEST_BUFFER_LIMIT); + bufferLimitTest(consumer, TEST_BUFFER_LIMIT); } public void testConfiguredBufferLimit() throws Exception { diff --git 
a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java new file mode 100644 index 00000000000..f997f798712 --- /dev/null +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java @@ -0,0 +1,210 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; +import org.apache.http.HttpHost; +import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes; +import static org.elasticsearch.client.RestClientTestUtil.randomErrorNoRetryStatusCode; +import static org.elasticsearch.client.RestClientTestUtil.randomOkStatusCode; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}. + * Works against real http servers, multiple hosts. Also tests failover by randomly shutting down hosts. 
+ */ +//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes +@IgnoreJRERequirement +public class RestClientMultipleHostsIntegTests extends RestClientTestCase { + + private static HttpServer[] httpServers; + private static RestClient restClient; + private static String pathPrefix; + + @BeforeClass + public static void startHttpServer() throws Exception { + String pathPrefixWithoutLeadingSlash; + if (randomBoolean()) { + pathPrefixWithoutLeadingSlash = "testPathPrefix/" + randomAsciiOfLengthBetween(1, 5); + pathPrefix = "/" + pathPrefixWithoutLeadingSlash; + } else { + pathPrefix = pathPrefixWithoutLeadingSlash = ""; + } + int numHttpServers = randomIntBetween(2, 4); + httpServers = new HttpServer[numHttpServers]; + HttpHost[] httpHosts = new HttpHost[numHttpServers]; + for (int i = 0; i < numHttpServers; i++) { + HttpServer httpServer = createHttpServer(); + httpServers[i] = httpServer; + httpHosts[i] = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); + } + RestClientBuilder restClientBuilder = RestClient.builder(httpHosts); + if (pathPrefix.length() > 0) { + restClientBuilder.setPathPrefix((randomBoolean() ? 
"/" : "") + pathPrefixWithoutLeadingSlash); + } + restClient = restClientBuilder.build(); + } + + private static HttpServer createHttpServer() throws Exception { + HttpServer httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + httpServer.start(); + //returns a different status code depending on the path + for (int statusCode : getAllStatusCodes()) { + httpServer.createContext(pathPrefix + "/" + statusCode, new ResponseHandler(statusCode)); + } + return httpServer; + } + + //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes + @IgnoreJRERequirement + private static class ResponseHandler implements HttpHandler { + private final int statusCode; + + ResponseHandler(int statusCode) { + this.statusCode = statusCode; + } + + @Override + public void handle(HttpExchange httpExchange) throws IOException { + httpExchange.getRequestBody().close(); + httpExchange.sendResponseHeaders(statusCode, -1); + httpExchange.close(); + } + } + + @AfterClass + public static void stopHttpServers() throws IOException { + restClient.close(); + restClient = null; + for (HttpServer httpServer : httpServers) { + httpServer.stop(0); + } + httpServers = null; + } + + @Before + public void stopRandomHost() { + //verify that shutting down some hosts doesn't matter as long as one working host is left behind + if (httpServers.length > 1 && randomBoolean()) { + List updatedHttpServers = new ArrayList<>(httpServers.length - 1); + int nodeIndex = randomInt(httpServers.length - 1); + for (int i = 0; i < httpServers.length; i++) { + HttpServer httpServer = httpServers[i]; + if (i == nodeIndex) { + httpServer.stop(0); + } else { + updatedHttpServers.add(httpServer); + } + } + httpServers = updatedHttpServers.toArray(new HttpServer[updatedHttpServers.size()]); + } + } + + public void testSyncRequests() throws IOException { + int numRequests = randomIntBetween(5, 20); + for (int i = 0; i < numRequests; i++) { + final String method = 
RestClientTestUtil.randomHttpMethod(getRandom()); + //we don't test status codes that are subject to retries as they interfere with hosts being stopped + final int statusCode = randomBoolean() ? randomOkStatusCode(getRandom()) : randomErrorNoRetryStatusCode(getRandom()); + Response response; + try { + response = restClient.performRequest(method, "/" + statusCode); + } catch(ResponseException responseException) { + response = responseException.getResponse(); + } + assertEquals(method, response.getRequestLine().getMethod()); + assertEquals(statusCode, response.getStatusLine().getStatusCode()); + assertEquals((pathPrefix.length() > 0 ? pathPrefix : "") + "/" + statusCode, response.getRequestLine().getUri()); + } + } + + public void testAsyncRequests() throws Exception { + int numRequests = randomIntBetween(5, 20); + final CountDownLatch latch = new CountDownLatch(numRequests); + final List responses = new CopyOnWriteArrayList<>(); + for (int i = 0; i < numRequests; i++) { + final String method = RestClientTestUtil.randomHttpMethod(getRandom()); + //we don't test status codes that are subject to retries as they interfere with hosts being stopped + final int statusCode = randomBoolean() ? 
randomOkStatusCode(getRandom()) : randomErrorNoRetryStatusCode(getRandom()); + restClient.performRequestAsync(method, "/" + statusCode, new ResponseListener() { + @Override + public void onSuccess(Response response) { + responses.add(new TestResponse(method, statusCode, response)); + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + responses.add(new TestResponse(method, statusCode, exception)); + latch.countDown(); + } + }); + } + assertTrue(latch.await(5, TimeUnit.SECONDS)); + + assertEquals(numRequests, responses.size()); + for (TestResponse testResponse : responses) { + Response response = testResponse.getResponse(); + assertEquals(testResponse.method, response.getRequestLine().getMethod()); + assertEquals(testResponse.statusCode, response.getStatusLine().getStatusCode()); + assertEquals((pathPrefix.length() > 0 ? pathPrefix : "") + "/" + testResponse.statusCode, + response.getRequestLine().getUri()); + } + } + + private static class TestResponse { + private final String method; + private final int statusCode; + private final Object response; + + TestResponse(String method, int statusCode, Object response) { + this.method = method; + this.statusCode = statusCode; + this.response = response; + } + + Response getResponse() { + if (response instanceof Response) { + return (Response) response; + } + if (response instanceof ResponseException) { + return ((ResponseException) response).getResponse(); + } + throw new AssertionError("unexpected response " + response.getClass()); + } + } +} diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java similarity index 62% rename from client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java rename to client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java index 941af2246f8..4440c1e8f97 100644 --- 
a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.client; import com.sun.net.httpserver.Headers; -import com.sun.net.httpserver.HttpContext; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; @@ -45,19 +44,13 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes; import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods; import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode; -import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; /** * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}. 
@@ -65,28 +58,42 @@ import static org.junit.Assert.fail; */ //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes @IgnoreJRERequirement -public class RestClientIntegTests extends RestClientTestCase { +public class RestClientSingleHostIntegTests extends RestClientTestCase { private static HttpServer httpServer; private static RestClient restClient; + private static String pathPrefix; private static Header[] defaultHeaders; @BeforeClass public static void startHttpServer() throws Exception { - httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + String pathPrefixWithoutLeadingSlash; + if (randomBoolean()) { + pathPrefixWithoutLeadingSlash = "testPathPrefix/" + randomAsciiOfLengthBetween(1, 5); + pathPrefix = "/" + pathPrefixWithoutLeadingSlash; + } else { + pathPrefix = pathPrefixWithoutLeadingSlash = ""; + } + + httpServer = createHttpServer(); + int numHeaders = randomIntBetween(0, 5); + defaultHeaders = generateHeaders("Header-default", "Header-array", numHeaders); + RestClientBuilder restClientBuilder = RestClient.builder( + new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())).setDefaultHeaders(defaultHeaders); + if (pathPrefix.length() > 0) { + restClientBuilder.setPathPrefix((randomBoolean() ? 
"/" : "") + pathPrefixWithoutLeadingSlash); + } + restClient = restClientBuilder.build(); + } + + private static HttpServer createHttpServer() throws Exception { + HttpServer httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); httpServer.start(); //returns a different status code depending on the path for (int statusCode : getAllStatusCodes()) { - createStatusCodeContext(httpServer, statusCode); + httpServer.createContext(pathPrefix + "/" + statusCode, new ResponseHandler(statusCode)); } - int numHeaders = randomIntBetween(0, 5); - defaultHeaders = generateHeaders("Header-default", "Header-array", numHeaders); - restClient = RestClient.builder(new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())) - .setDefaultHeaders(defaultHeaders).build(); - } - - private static void createStatusCodeContext(HttpServer httpServer, final int statusCode) { - httpServer.createContext("/" + statusCode, new ResponseHandler(statusCode)); + return httpServer; } //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes @@ -157,7 +164,11 @@ public class RestClientIntegTests extends RestClientTestCase { } catch(ResponseException e) { esResponse = e.getResponse(); } - assertThat(esResponse.getStatusLine().getStatusCode(), equalTo(statusCode)); + + assertEquals(method, esResponse.getRequestLine().getMethod()); + assertEquals(statusCode, esResponse.getStatusLine().getStatusCode()); + assertEquals((pathPrefix.length() > 0 ? pathPrefix : "") + "/" + statusCode, esResponse.getRequestLine().getUri()); + for (final Header responseHeader : esResponse.getHeaders()) { final String name = responseHeader.getName(); final String value = responseHeader.getValue(); @@ -197,49 +208,6 @@ public class RestClientIntegTests extends RestClientTestCase { bodyTest("GET"); } - /** - * Ensure that pathPrefix works as expected. 
- */ - public void testPathPrefix() throws IOException { - // guarantee no other test setup collides with this one and lets it sneak through - final String uniqueContextSuffix = "/testPathPrefix"; - final String pathPrefix = "base/" + randomAsciiOfLengthBetween(1, 5) + "/"; - final int statusCode = randomStatusCode(getRandom()); - - final HttpContext context = - httpServer.createContext("/" + pathPrefix + statusCode + uniqueContextSuffix, new ResponseHandler(statusCode)); - - try (final RestClient client = - RestClient.builder(new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())) - .setPathPrefix((randomBoolean() ? "/" : "") + pathPrefix).build()) { - - for (final String method : getHttpMethods()) { - Response esResponse; - try { - esResponse = client.performRequest(method, "/" + statusCode + uniqueContextSuffix); - } catch(ResponseException e) { - esResponse = e.getResponse(); - } - - assertThat(esResponse.getRequestLine().getUri(), equalTo("/" + pathPrefix + statusCode + uniqueContextSuffix)); - assertThat(esResponse.getStatusLine().getStatusCode(), equalTo(statusCode)); - } - } finally { - httpServer.removeContext(context); - } - } - - public void testPath() throws IOException { - for (String method : getHttpMethods()) { - try { - restClient.performRequest(method, null); - fail("path set to null should fail!"); - } catch (NullPointerException e) { - assertEquals("path must not be null", e.getMessage()); - } - } - } - private void bodyTest(String method) throws IOException { String requestBody = "{ \"field\": \"value\" }"; StringEntity entity = new StringEntity(requestBody); @@ -250,60 +218,9 @@ public class RestClientIntegTests extends RestClientTestCase { } catch(ResponseException e) { esResponse = e.getResponse(); } + assertEquals(method, esResponse.getRequestLine().getMethod()); assertEquals(statusCode, esResponse.getStatusLine().getStatusCode()); + assertEquals((pathPrefix.length() > 0 ? 
pathPrefix : "") + "/" + statusCode, esResponse.getRequestLine().getUri()); assertEquals(requestBody, EntityUtils.toString(esResponse.getEntity())); } - - public void testAsyncRequests() throws Exception { - int numRequests = randomIntBetween(5, 20); - final CountDownLatch latch = new CountDownLatch(numRequests); - final List responses = new CopyOnWriteArrayList<>(); - for (int i = 0; i < numRequests; i++) { - final String method = RestClientTestUtil.randomHttpMethod(getRandom()); - final int statusCode = randomStatusCode(getRandom()); - restClient.performRequestAsync(method, "/" + statusCode, new ResponseListener() { - @Override - public void onSuccess(Response response) { - responses.add(new TestResponse(method, statusCode, response)); - latch.countDown(); - } - - @Override - public void onFailure(Exception exception) { - responses.add(new TestResponse(method, statusCode, exception)); - latch.countDown(); - } - }); - } - assertTrue(latch.await(5, TimeUnit.SECONDS)); - - assertEquals(numRequests, responses.size()); - for (TestResponse response : responses) { - assertEquals(response.method, response.getResponse().getRequestLine().getMethod()); - assertEquals(response.statusCode, response.getResponse().getStatusLine().getStatusCode()); - - } - } - - private static class TestResponse { - private final String method; - private final int statusCode; - private final Object response; - - TestResponse(String method, int statusCode, Object response) { - this.method = method; - this.statusCode = statusCode; - this.response = response; - } - - Response getResponse() { - if (response instanceof Response) { - return (Response) response; - } - if (response instanceof ResponseException) { - return ((ResponseException) response).getResponse(); - } - throw new AssertionError("unexpected response " + response.getClass()); - } - } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java 
b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 92e2b0da971..ce0d6d0936e 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -139,6 +139,17 @@ public class RestClientSingleHostTests extends RestClientTestCase { restClient = new RestClient(httpClient, 10000, defaultHeaders, new HttpHost[]{httpHost}, null, failureListener); } + public void testNullPath() throws IOException { + for (String method : getHttpMethods()) { + try { + restClient.performRequest(method, null); + fail("path set to null should fail!"); + } catch (NullPointerException e) { + assertEquals("path must not be null", e.getMessage()); + } + } + } + /** * Verifies the content of the {@link HttpRequest} that's internally created and passed through to the http client */ diff --git a/core/build.gradle b/core/build.gradle index 0726666e0cf..7a580335571 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -62,10 +62,7 @@ dependencies { compile 'com.carrotsearch:hppc:0.7.1' // time handling, remove with java 8 time - compile 'joda-time:joda-time:2.9.4' - // joda 2.0 moved to using volatile fields for datetime - // When updating to a new version, make sure to update our copy of BaseDateTime - compile 'org.joda:joda-convert:1.2' + compile 'joda-time:joda-time:2.9.5' // json and yaml compile "org.yaml:snakeyaml:${versions.snakeyaml}" diff --git a/core/licenses/joda-convert-1.2.jar.sha1 b/core/licenses/joda-convert-1.2.jar.sha1 deleted file mode 100644 index 37c3e870580..00000000000 --- a/core/licenses/joda-convert-1.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -35ec554f0cd00c956cc69051514d9488b1374dec diff --git a/core/licenses/joda-convert-LICENSE.txt b/core/licenses/joda-convert-LICENSE.txt deleted file mode 100644 index 75b52484ea4..00000000000 --- a/core/licenses/joda-convert-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache 
License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/core/licenses/joda-convert-NOTICE.txt b/core/licenses/joda-convert-NOTICE.txt deleted file mode 100644 index dffbcf31cac..00000000000 --- a/core/licenses/joda-convert-NOTICE.txt +++ /dev/null @@ -1,5 +0,0 @@ -============================================================================= -= NOTICE file corresponding to section 4d of the Apache License Version 2.0 = -============================================================================= -This product includes software developed by -Joda.org (http://www.joda.org/). 
diff --git a/core/licenses/joda-time-2.9.4.jar.sha1 b/core/licenses/joda-time-2.9.4.jar.sha1 deleted file mode 100644 index e9ea891bfee..00000000000 --- a/core/licenses/joda-time-2.9.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1c295b462f16702ebe720bbb08f62e1ba80da41b \ No newline at end of file diff --git a/core/licenses/joda-time-2.9.5.jar.sha1 b/core/licenses/joda-time-2.9.5.jar.sha1 new file mode 100644 index 00000000000..ecf1c781556 --- /dev/null +++ b/core/licenses/joda-time-2.9.5.jar.sha1 @@ -0,0 +1 @@ +5f01da7306363fad2028b916f3eab926262de928 \ No newline at end of file diff --git a/core/licenses/lucene-analyzers-common-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-analyzers-common-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 1626a88f4a2..00000000000 --- a/core/licenses/lucene-analyzers-common-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -61aacb657e44a9beabf95834e106bbb96373a703 \ No newline at end of file diff --git a/core/licenses/lucene-analyzers-common-6.3.0.jar.sha1 b/core/licenses/lucene-analyzers-common-6.3.0.jar.sha1 new file mode 100644 index 00000000000..77d6e83314f --- /dev/null +++ b/core/licenses/lucene-analyzers-common-6.3.0.jar.sha1 @@ -0,0 +1 @@ +494aed699af238c3872a6b65e17939e9cb7ddbe0 \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-backward-codecs-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 2f45d50eeee..00000000000 --- a/core/licenses/lucene-backward-codecs-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -600de75a81e259cab0384e546d9a1d527ddba6d6 \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-6.3.0.jar.sha1 b/core/licenses/lucene-backward-codecs-6.3.0.jar.sha1 new file mode 100644 index 00000000000..8d1640eecf8 --- /dev/null +++ b/core/licenses/lucene-backward-codecs-6.3.0.jar.sha1 @@ -0,0 +1 @@ +77dede7dff1b833ca2e92d8ab137edb209354d9b \ No newline at end of 
file diff --git a/core/licenses/lucene-core-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-core-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 9dcdbeb40e9..00000000000 --- a/core/licenses/lucene-core-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -188774468a56a8731ca639527d721060d26ffebd \ No newline at end of file diff --git a/core/licenses/lucene-core-6.3.0.jar.sha1 b/core/licenses/lucene-core-6.3.0.jar.sha1 new file mode 100644 index 00000000000..b9f5ccfb8d8 --- /dev/null +++ b/core/licenses/lucene-core-6.3.0.jar.sha1 @@ -0,0 +1 @@ +d3c87ea89e2f83e401f9cc7f14e4c43945f7f1e1 \ No newline at end of file diff --git a/core/licenses/lucene-grouping-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-grouping-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 14c8d7aa2b7..00000000000 --- a/core/licenses/lucene-grouping-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5afd9271e3d8f645440f48ff2487545ae5573e7e \ No newline at end of file diff --git a/core/licenses/lucene-grouping-6.3.0.jar.sha1 b/core/licenses/lucene-grouping-6.3.0.jar.sha1 new file mode 100644 index 00000000000..003c3801acd --- /dev/null +++ b/core/licenses/lucene-grouping-6.3.0.jar.sha1 @@ -0,0 +1 @@ +2c96d59e318ea66838aeb9c5cfb8b4d27b40953c \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-highlighter-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index e695284756d..00000000000 --- a/core/licenses/lucene-highlighter-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0f575175e26d4d3b1095f6300cbefbbb3ee994cd \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-6.3.0.jar.sha1 b/core/licenses/lucene-highlighter-6.3.0.jar.sha1 new file mode 100644 index 00000000000..0a7d5deac0c --- /dev/null +++ b/core/licenses/lucene-highlighter-6.3.0.jar.sha1 @@ -0,0 +1 @@ +4f154d8badfe47fe45503c18fb30f2177f758794 \ No newline at end of file 
diff --git a/core/licenses/lucene-join-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-join-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index ad02b0cac3b..00000000000 --- a/core/licenses/lucene-join-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ee898c3d318681c9f29c56e6d9b52876be96d814 \ No newline at end of file diff --git a/core/licenses/lucene-join-6.3.0.jar.sha1 b/core/licenses/lucene-join-6.3.0.jar.sha1 new file mode 100644 index 00000000000..df43f249d16 --- /dev/null +++ b/core/licenses/lucene-join-6.3.0.jar.sha1 @@ -0,0 +1 @@ +79b898117dcfde2981ec6806e420ff218842eca8 \ No newline at end of file diff --git a/core/licenses/lucene-memory-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-memory-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 0e36d650670..00000000000 --- a/core/licenses/lucene-memory-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ea6defd322456711394b4dabcda70a217e3caacd \ No newline at end of file diff --git a/core/licenses/lucene-memory-6.3.0.jar.sha1 b/core/licenses/lucene-memory-6.3.0.jar.sha1 new file mode 100644 index 00000000000..a8a4e5f1dd9 --- /dev/null +++ b/core/licenses/lucene-memory-6.3.0.jar.sha1 @@ -0,0 +1 @@ +89edeb404e507d640cb13903acff6953199704a2 \ No newline at end of file diff --git a/core/licenses/lucene-misc-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-misc-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index e458570651a..00000000000 --- a/core/licenses/lucene-misc-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ea2de7f9753a8e19a1ec9f25a3ea65d7ce909a0e \ No newline at end of file diff --git a/core/licenses/lucene-misc-6.3.0.jar.sha1 b/core/licenses/lucene-misc-6.3.0.jar.sha1 new file mode 100644 index 00000000000..de4685d9564 --- /dev/null +++ b/core/licenses/lucene-misc-6.3.0.jar.sha1 @@ -0,0 +1 @@ +02d0e1f5a9df15ac911ad495bad5ea253ab50a9f \ No newline at end of file diff --git 
a/core/licenses/lucene-queries-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-queries-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 1231424e3be..00000000000 --- a/core/licenses/lucene-queries-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b15c6f29bfb9ec14a4615013a94bfa43a63793d \ No newline at end of file diff --git a/core/licenses/lucene-queries-6.3.0.jar.sha1 b/core/licenses/lucene-queries-6.3.0.jar.sha1 new file mode 100644 index 00000000000..8bf5b45a4ea --- /dev/null +++ b/core/licenses/lucene-queries-6.3.0.jar.sha1 @@ -0,0 +1 @@ +eb7938233c8103223069c7b5b5f785b4d20ddafa \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-queryparser-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index a367f4e45cf..00000000000 --- a/core/licenses/lucene-queryparser-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d89d9fa1036c38144e0b8db079ae959353847c86 \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-6.3.0.jar.sha1 b/core/licenses/lucene-queryparser-6.3.0.jar.sha1 new file mode 100644 index 00000000000..e2dae1cc8b0 --- /dev/null +++ b/core/licenses/lucene-queryparser-6.3.0.jar.sha1 @@ -0,0 +1 @@ +e979fb02155cbe81a8d335d6dc41d2ef06be68b6 \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-sandbox-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 4c8874c0b4b..00000000000 --- a/core/licenses/lucene-sandbox-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c003c1ab0a19a02b30156ce13372cff1001d6a7d \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-6.3.0.jar.sha1 b/core/licenses/lucene-sandbox-6.3.0.jar.sha1 new file mode 100644 index 00000000000..6baf6baabfe --- /dev/null +++ b/core/licenses/lucene-sandbox-6.3.0.jar.sha1 @@ -0,0 +1 @@ +257387c45c6fa2b77fd6931751f93fdcd798ced4 \ No newline at end of file 
diff --git a/core/licenses/lucene-spatial-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-spatial-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 75dd8263828..00000000000 --- a/core/licenses/lucene-spatial-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a3c570bf588d7c9ca43d074db9ce9c9b8408b930 \ No newline at end of file diff --git a/core/licenses/lucene-spatial-6.3.0.jar.sha1 b/core/licenses/lucene-spatial-6.3.0.jar.sha1 new file mode 100644 index 00000000000..ff35a066ffd --- /dev/null +++ b/core/licenses/lucene-spatial-6.3.0.jar.sha1 @@ -0,0 +1 @@ +3cf5fe5402b5e34b240b73501c9e97a82428259e \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-spatial-extras-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index debd8e0b873..00000000000 --- a/core/licenses/lucene-spatial-extras-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -de54ca61f5892cf2c88ac083b3332a827beca7ff \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-6.3.0.jar.sha1 b/core/licenses/lucene-spatial-extras-6.3.0.jar.sha1 new file mode 100644 index 00000000000..0c52cf09377 --- /dev/null +++ b/core/licenses/lucene-spatial-extras-6.3.0.jar.sha1 @@ -0,0 +1 @@ +1b77ef3740dc885c62d5966fbe9aea1199d344fb \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-spatial3d-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index b9eb9a0c270..00000000000 --- a/core/licenses/lucene-spatial3d-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cacdf81b324acd335be63798d5a3dd16e7dff9a3 \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-6.3.0.jar.sha1 b/core/licenses/lucene-spatial3d-6.3.0.jar.sha1 new file mode 100644 index 00000000000..c23003146af --- /dev/null +++ b/core/licenses/lucene-spatial3d-6.3.0.jar.sha1 @@ -0,0 +1 @@ 
+aa94b4a8636b3633008640cc5155ad354aebcea5 \ No newline at end of file diff --git a/core/licenses/lucene-suggest-6.3.0-snapshot-a66a445.jar.sha1 b/core/licenses/lucene-suggest-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index a6517bc7d42..00000000000 --- a/core/licenses/lucene-suggest-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a5cb3723bc8e0db185fc43e57b648145de27fde8 \ No newline at end of file diff --git a/core/licenses/lucene-suggest-6.3.0.jar.sha1 b/core/licenses/lucene-suggest-6.3.0.jar.sha1 new file mode 100644 index 00000000000..137b8976536 --- /dev/null +++ b/core/licenses/lucene-suggest-6.3.0.jar.sha1 @@ -0,0 +1 @@ +ed5d8ee5cd7edcad5d4ffca2b4540ccc844e9bb0 \ No newline at end of file diff --git a/core/src/main/java/org/apache/lucene/index/XPointValues.java b/core/src/main/java/org/apache/lucene/index/XPointValues.java deleted file mode 100644 index c4fa0b4d623..00000000000 --- a/core/src/main/java/org/apache/lucene/index/XPointValues.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.lucene.index; -import org.apache.lucene.util.StringHelper; - -import java.io.IOException; - -/** - * Forked utility methods from Lucene's PointValues until LUCENE-7257 is released. - */ -public class XPointValues { - /** Return the cumulated number of points across all leaves of the given - * {@link IndexReader}. Leaves that do not have points for the given field - * are ignored. - * @see PointValues#size(String) */ - public static long size(IndexReader reader, String field) throws IOException { - long size = 0; - for (LeafReaderContext ctx : reader.leaves()) { - FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field); - if (info == null || info.getPointDimensionCount() == 0) { - continue; - } - PointValues values = ctx.reader().getPointValues(); - size += values.size(field); - } - return size; - } - - /** Return the cumulated number of docs that have points across all leaves - * of the given {@link IndexReader}. Leaves that do not have points for the - * given field are ignored. - * @see PointValues#getDocCount(String) */ - public static int getDocCount(IndexReader reader, String field) throws IOException { - int count = 0; - for (LeafReaderContext ctx : reader.leaves()) { - FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field); - if (info == null || info.getPointDimensionCount() == 0) { - continue; - } - PointValues values = ctx.reader().getPointValues(); - count += values.getDocCount(field); - } - return count; - } - - /** Return the minimum packed values across all leaves of the given - * {@link IndexReader}. Leaves that do not have points for the given field - * are ignored. 
- * @see PointValues#getMinPackedValue(String) */ - public static byte[] getMinPackedValue(IndexReader reader, String field) throws IOException { - byte[] minValue = null; - for (LeafReaderContext ctx : reader.leaves()) { - FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field); - if (info == null || info.getPointDimensionCount() == 0) { - continue; - } - PointValues values = ctx.reader().getPointValues(); - byte[] leafMinValue = values.getMinPackedValue(field); - if (leafMinValue == null) { - continue; - } - if (minValue == null) { - minValue = leafMinValue.clone(); - } else { - final int numDimensions = values.getNumDimensions(field); - final int numBytesPerDimension = values.getBytesPerDimension(field); - for (int i = 0; i < numDimensions; ++i) { - int offset = i * numBytesPerDimension; - if (StringHelper.compare(numBytesPerDimension, leafMinValue, offset, minValue, offset) < 0) { - System.arraycopy(leafMinValue, offset, minValue, offset, numBytesPerDimension); - } - } - } - } - return minValue; - } - - /** Return the maximum packed values across all leaves of the given - * {@link IndexReader}. Leaves that do not have points for the given field - * are ignored. 
- * @see PointValues#getMaxPackedValue(String) */ - public static byte[] getMaxPackedValue(IndexReader reader, String field) throws IOException { - byte[] maxValue = null; - for (LeafReaderContext ctx : reader.leaves()) { - FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field); - if (info == null || info.getPointDimensionCount() == 0) { - continue; - } - PointValues values = ctx.reader().getPointValues(); - byte[] leafMaxValue = values.getMaxPackedValue(field); - if (leafMaxValue == null) { - continue; - } - if (maxValue == null) { - maxValue = leafMaxValue.clone(); - } else { - final int numDimensions = values.getNumDimensions(field); - final int numBytesPerDimension = values.getBytesPerDimension(field); - for (int i = 0; i < numDimensions; ++i) { - int offset = i * numBytesPerDimension; - if (StringHelper.compare(numBytesPerDimension, leafMaxValue, offset, maxValue, offset) > 0) { - System.arraycopy(leafMaxValue, offset, maxValue, offset, numBytesPerDimension); - } - } - } - } - return maxValue; - } - - /** Default constructor */ - private XPointValues() { - } -} diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index eb33dbe4b18..8a0b4f4e00b 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -523,16 +523,14 @@ public class ElasticsearchException extends RuntimeException implements ToXConte org.elasticsearch.index.shard.IndexShardRelocatedException::new, 45), NODE_SHOULD_NOT_CONNECT_EXCEPTION(org.elasticsearch.transport.NodeShouldNotConnectException.class, org.elasticsearch.transport.NodeShouldNotConnectException::new, 46), - INDEX_TEMPLATE_ALREADY_EXISTS_EXCEPTION(org.elasticsearch.indices.IndexTemplateAlreadyExistsException.class, - org.elasticsearch.indices.IndexTemplateAlreadyExistsException::new, 47), + // 47 used to be for 
IndexTemplateAlreadyExistsException which was deprecated in 5.1 removed in 6.0 TRANSLOG_CORRUPTED_EXCEPTION(org.elasticsearch.index.translog.TranslogCorruptedException.class, org.elasticsearch.index.translog.TranslogCorruptedException::new, 48), CLUSTER_BLOCK_EXCEPTION(org.elasticsearch.cluster.block.ClusterBlockException.class, org.elasticsearch.cluster.block.ClusterBlockException::new, 49), FETCH_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.fetch.FetchPhaseExecutionException.class, org.elasticsearch.search.fetch.FetchPhaseExecutionException::new, 50), - INDEX_SHARD_ALREADY_EXISTS_EXCEPTION(org.elasticsearch.index.IndexShardAlreadyExistsException.class, - org.elasticsearch.index.IndexShardAlreadyExistsException::new, 51), + // 51 used to be for IndexShardAlreadyExistsException which was deprecated in 5.1 removed in 6.0 VERSION_CONFLICT_ENGINE_EXCEPTION(org.elasticsearch.index.engine.VersionConflictEngineException.class, org.elasticsearch.index.engine.VersionConflictEngineException::new, 52), ENGINE_EXCEPTION(org.elasticsearch.index.engine.EngineException.class, org.elasticsearch.index.engine.EngineException::new, 53), @@ -553,7 +551,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper::new, 62), ALIAS_FILTER_PARSING_EXCEPTION(org.elasticsearch.indices.AliasFilterParsingException.class, org.elasticsearch.indices.AliasFilterParsingException::new, 63), - // 64 was DeleteByQueryFailedEngineException, which was removed in 3.0 + // 64 was DeleteByQueryFailedEngineException, which was removed in 5.0 GATEWAY_EXCEPTION(org.elasticsearch.gateway.GatewayException.class, org.elasticsearch.gateway.GatewayException::new, 65), INDEX_SHARD_NOT_RECOVERING_EXCEPTION(org.elasticsearch.index.shard.IndexShardNotRecoveringException.class, org.elasticsearch.index.shard.IndexShardNotRecoveringException::new, 66), diff --git a/core/src/main/java/org/elasticsearch/Version.java 
b/core/src/main/java/org/elasticsearch/Version.java index e9e950ce80a..2d61fb8194a 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -19,6 +19,7 @@ package org.elasticsearch; +import org.apache.lucene.util.MathUtil; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; @@ -298,7 +299,27 @@ public class Version { * is a beta or RC release then the version itself is returned. */ public Version minimumCompatibilityVersion() { - return Version.smallest(this, fromId(major * 1000000 + 99)); + final int bwcMajor; + final int bwcMinor; + if (this.onOrAfter(Version.V_6_0_0_alpha1)) { + bwcMajor = major-1; + bwcMinor = 0; // TODO we have to move this to the latest released minor of the last major but for now we just keep + } else { + bwcMajor = major; + bwcMinor = 0; + } + return Version.smallest(this, fromId(bwcMajor * 1000000 + bwcMinor * 10000 + 99)); + } + + /** + * Returns true iff both version are compatible. 
Otherwise false + */ + public boolean isCompatible(Version version) { + boolean compatible = onOrAfter(version.minimumCompatibilityVersion()) + && version.onOrAfter(minimumCompatibilityVersion()); + + assert compatible == false || Math.max(major, version.major) - Math.min(major, version.major) <= 1; + return compatible; } @SuppressForbidden(reason = "System.out.*") diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index a3797c3cb88..0097db4b7c4 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -356,7 +356,7 @@ public class ActionModule extends AbstractModule { register(handler.getAction().name(), handler); } - public , Response extends ActionResponse> void register( + public void register( GenericAction action, Class> transportAction, Class... supportTransportActions) { register(new ActionHandler<>(action, transportAction, supportTransportActions)); diff --git a/core/src/main/java/org/elasticsearch/action/ActionRequest.java b/core/src/main/java/org/elasticsearch/action/ActionRequest.java index e8dd639c4df..769b2e7b573 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ActionRequest.java @@ -25,7 +25,7 @@ import org.elasticsearch.transport.TransportRequest; import java.io.IOException; -public abstract class ActionRequest> extends TransportRequest { +public abstract class ActionRequest extends TransportRequest { public ActionRequest() { super(); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 7a5e5eef08a..9773410aacc 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -34,6 +34,8 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.ClusterServiceState; +import org.elasticsearch.cluster.service.ClusterStateStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -140,7 +142,8 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< assert waitFor >= 0; final ClusterStateObserver observer = new ClusterStateObserver(clusterService, logger, threadPool.getThreadContext()); - final ClusterState state = observer.observedState(); + final ClusterServiceState observedState = observer.observedState(); + final ClusterState state = observedState.getClusterState(); if (request.timeout().millis() == 0) { listener.onResponse(getResponse(request, state, waitFor, request.timeout().millis() == 0)); return; @@ -148,8 +151,8 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< final int concreteWaitFor = waitFor; final ClusterStateObserver.ChangePredicate validationPredicate = new ClusterStateObserver.ValidationPredicate() { @Override - protected boolean validate(ClusterState newState) { - return newState.status() == ClusterState.ClusterStateStatus.APPLIED && validateRequest(request, newState, concreteWaitFor); + protected boolean validate(ClusterServiceState newState) { + return newState.getClusterStateStatus() == ClusterStateStatus.APPLIED && validateRequest(request, newState.getClusterState(), concreteWaitFor); } }; @@ -171,7 +174,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< listener.onResponse(response); } }; - if 
(state.status() == ClusterState.ClusterStateStatus.APPLIED && validateRequest(request, state, concreteWaitFor)) { + if (observedState.getClusterStateStatus() == ClusterStateStatus.APPLIED && validateRequest(request, state, concreteWaitFor)) { stateListener.onNewClusterState(state); } else { observer.waitForNextChange(stateListener, validationPredicate, request.timeout()); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/LivenessRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/LivenessRequest.java index 033dd5957d9..d6441bb8e77 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/LivenessRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/LivenessRequest.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.ActionRequestValidationException; * Transport level private response for the transport handler registered under * {@value org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction#NAME} */ -public final class LivenessRequest extends ActionRequest { +public final class LivenessRequest extends ActionRequest { @Override public ActionRequestValidationException validate() { return null; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index 6587f00837f..ce5d92753a8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import 
org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; @@ -46,6 +47,7 @@ import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -118,12 +120,44 @@ public class TransportCancelTasksAction extends TransportTasksAction childNodes = taskManager.cancel(cancellableTask, request.getReason(), banLock::onTaskFinished); if (childNodes != null) { if (childNodes.isEmpty()) { + // The task has no child tasks, so we can return immediately logger.trace("cancelling task {} with no children", cancellableTask.getId()); listener.onResponse(cancellableTask.taskInfo(clusterService.localNode().getId(), false)); } else { + // The task has some child tasks, we need to wait for until ban is set on all nodes logger.trace("cancelling task {} with children on nodes [{}]", cancellableTask.getId(), childNodes); - setBanOnNodes(request.getReason(), cancellableTask, childNodes, banLock); - listener.onResponse(cancellableTask.taskInfo(clusterService.localNode().getId(), false)); + String nodeId = clusterService.localNode().getId(); + AtomicInteger responses = new AtomicInteger(childNodes.size()); + List failures = new ArrayList<>(); + setBanOnNodes(request.getReason(), cancellableTask, childNodes, new ActionListener() { + @Override + public void onResponse(Void aVoid) { + processResponse(); + } + + @Override + public void onFailure(Exception e) { + synchronized (failures) { + failures.add(e); + } + processResponse(); + } + + private void processResponse() { + banLock.onBanSet(); + if (responses.decrementAndGet() == 0) { + if (failures.isEmpty() == false) { + IllegalStateException exception = new IllegalStateException("failed to cancel children of the task [" + + 
cancellableTask.getId() + "]"); + failures.forEach(exception::addSuppressed); + listener.onFailure(exception); + } else { + listener.onResponse(cancellableTask.taskInfo(nodeId, false)); + } + } + } + }); + } } else { logger.trace("task {} is already cancelled", cancellableTask.getId()); @@ -136,10 +170,10 @@ public class TransportCancelTasksAction extends TransportTasksAction nodes, BanLock banLock) { + private void setBanOnNodes(String reason, CancellableTask task, Set nodes, ActionListener listener) { sendSetBanRequest(nodes, BanParentTaskRequest.createSetBanParentTaskRequest(new TaskId(clusterService.localNode().getId(), task.getId()), reason), - banLock); + listener); } private void removeBanOnNodes(CancellableTask task, Set nodes) { @@ -147,28 +181,29 @@ public class TransportCancelTasksAction extends TransportTasksAction nodes, BanParentTaskRequest request, BanLock banLock) { + private void sendSetBanRequest(Set nodes, BanParentTaskRequest request, ActionListener listener) { ClusterState clusterState = clusterService.state(); for (String node : nodes) { DiscoveryNode discoveryNode = clusterState.getNodes().get(node); if (discoveryNode != null) { // Check if node still in the cluster - logger.debug("Sending ban for tasks with the parent [{}] to the node [{}], ban [{}]", request.parentTaskId, node, + logger.trace("Sending ban for tasks with the parent [{}] to the node [{}], ban [{}]", request.parentTaskId, node, request.ban); transportService.sendRequest(discoveryNode, BAN_PARENT_ACTION_NAME, request, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { @Override public void handleResponse(TransportResponse.Empty response) { - banLock.onBanSet(); + listener.onResponse(null); } @Override public void handleException(TransportException exp) { - banLock.onBanSet(); + logger.warn("Cannot send ban for tasks with the parent [{}] to the node [{}]", request.parentTaskId, node); + listener.onFailure(exp); } }); } else { - banLock.onBanSet(); + 
listener.onResponse(null); logger.debug("Cannot send ban for tasks with the parent [{}] to the node [{}] - the node no longer in the cluster", request.parentTaskId, node); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequest.java index efbc9679e71..07d40b5ffca 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequest.java @@ -33,7 +33,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to get node tasks */ -public class GetTaskRequest extends ActionRequest { +public class GetTaskRequest extends ActionRequest { private TaskId taskId = TaskId.EMPTY_TASK_ID; private boolean waitForCompletion = false; private TimeValue timeout = null; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java index 967ea31c84a..819d2de999c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; /** Request the mappings of specific fields */ -public class GetFieldMappingsRequest extends ActionRequest implements IndicesRequest.Replaceable { +public class GetFieldMappingsRequest extends ActionRequest implements IndicesRequest.Replaceable { protected boolean local = false; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java index c8b10af9a8f..0f396afa551 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java @@ -32,7 +32,7 @@ public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpda private boolean updateAllTypes = false; - PutMappingClusterStateUpdateRequest() { + public PutMappingClusterStateUpdateRequest() { } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 5be044ea585..ae9f2a38060 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.template.put; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.Alias; @@ -32,6 +33,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ 
-41,10 +44,13 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; @@ -56,11 +62,13 @@ import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; */ public class PutIndexTemplateRequest extends MasterNodeRequest implements IndicesRequest { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(PutIndexTemplateRequest.class)); + private String name; private String cause = ""; - private String template; + private List indexPatterns; private int order; @@ -92,8 +100,8 @@ public class PutIndexTemplateRequest extends MasterNodeRequest indexPatterns) { + this.indexPatterns = indexPatterns; return this; } - public String template() { - return this.template; + public List patterns() { + return this.indexPatterns; } public PutIndexTemplateRequest order(int order) { @@ -142,7 +150,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequesttrue to force only creation, not an update of an index template. If it already - * exists, it will fail with an {@link org.elasticsearch.indices.IndexTemplateAlreadyExistsException}. + * exists, it will fail with an {@link IllegalArgumentException}. 
*/ public PutIndexTemplateRequest create(boolean create) { this.create = create; @@ -286,7 +294,20 @@ public class PutIndexTemplateRequest extends MasterNodeRequest entry : source.entrySet()) { String name = entry.getKey(); if (name.equals("template")) { - template(entry.getValue().toString()); + // This is needed to allow for bwc (beats, logstash) with pre-5.0 templates (#21009) + if(entry.getValue() instanceof String) { + DEPRECATION_LOGGER.deprecated("Deprecated field [template] used, replaced by [index_patterns]"); + patterns(Collections.singletonList((String) entry.getValue())); + } + } else if (name.equals("index_patterns")) { + if(entry.getValue() instanceof String) { + patterns(Collections.singletonList((String) entry.getValue())); + } else if (entry.getValue() instanceof List) { + List elements = ((List) entry.getValue()).stream().map(Object::toString).collect(Collectors.toList()); + patterns(elements); + } else { + throw new IllegalArgumentException("Malformed [template] value, should be a string or a list of strings"); + } } else if (name.equals("order")) { order(XContentMapValues.nodeIntegerValue(entry.getValue(), order())); } else if ("version".equals(name)) { @@ -295,7 +316,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest) entry.getValue()); @@ -436,7 +457,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest 0 ? 
indexPatterns.get(0) : ""); + } out.writeInt(order); out.writeBoolean(create); writeSettingsToStream(settings, out); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java index 77343277f63..c1db96ae7ce 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import java.util.Collections; +import java.util.List; import java.util.Map; public class PutIndexTemplateRequestBuilder @@ -39,10 +41,20 @@ public class PutIndexTemplateRequestBuilder } /** - * Sets the template match expression that will be used to match on indices created. + * Sets the match expression that will be used to match on indices created. + * + * @deprecated Replaced by {@link #setPatterns(List)} */ - public PutIndexTemplateRequestBuilder setTemplate(String template) { - request.template(template); + @Deprecated + public PutIndexTemplateRequestBuilder setTemplate(String indexPattern) { + return setPatterns(Collections.singletonList(indexPattern)); + } + + /** + * Sets the match expression that will be used to match on indices created. + */ + public PutIndexTemplateRequestBuilder setPatterns(List indexPatterns) { + request.patterns(indexPatterns); return this; } @@ -64,7 +76,7 @@ public class PutIndexTemplateRequestBuilder /** * Set to true to force only creation, not an update of an index template. If it already - * exists, it will fail with an {@link org.elasticsearch.indices.IndexTemplateAlreadyExistsException}. 
+ * exists, it will fail with an {@link IllegalArgumentException}. */ public PutIndexTemplateRequestBuilder setCreate(boolean create) { request.create(create); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java index 77746b395e1..342b2397773 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java @@ -79,7 +79,7 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction

implements CompositeIndicesRequest, WriteRequest { +public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest { private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(BulkRequest.class)); @@ -170,7 +170,7 @@ public class BulkRequest extends ActionRequest implements Composite sizeInBytes += request.upsertRequest().source().length(); } if (request.script() != null) { - sizeInBytes += request.script().getScript().length() * 2; + sizeInBytes += request.script().getIdOrCode().length() * 2; } return this; } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 01528c7d228..12e0b2a72ac 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -142,10 +142,18 @@ public class TransportShardBulkAction extends TransportWriteAction implements Iterable, CompositeIndicesRequest, RealtimeRequest { +public class MultiGetRequest extends ActionRequest implements Iterable, CompositeIndicesRequest, RealtimeRequest { /** * A single get item. 
diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index a6adaa12fef..8019bb977ad 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -142,9 +142,18 @@ public class TransportIndexAction extends TransportWriteAction, Response extends ActionResponse> void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { + public void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { switch (action) { case IndexAction.NAME: IndexRequest indexRequest = (IndexRequest) request; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 5d2aea389dc..a3928c17fc7 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -54,7 +54,7 @@ public final class IngestProxyActionFilter implements ActionFilter { } @Override - public , Response extends ActionResponse> void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { + public void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { Action ingestAction; switch (action) { case IndexAction.NAME: diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index a63f7a30dbe..c9761034418 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ 
-37,7 +37,7 @@ import java.util.Map; import static org.elasticsearch.ingest.IngestDocument.MetaData; -public class SimulatePipelineRequest extends ActionRequest { +public class SimulatePipelineRequest extends ActionRequest { private String id; private boolean verbose; diff --git a/core/src/main/java/org/elasticsearch/action/main/MainRequest.java b/core/src/main/java/org/elasticsearch/action/main/MainRequest.java index 1484bc2a3e9..1736e56a8dc 100644 --- a/core/src/main/java/org/elasticsearch/action/main/MainRequest.java +++ b/core/src/main/java/org/elasticsearch/action/main/MainRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.main; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -public class MainRequest extends ActionRequest { +public class MainRequest extends ActionRequest { @Override public ActionRequestValidationException validate() { diff --git a/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java b/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java index f2f26d655d3..23c5c3747fb 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java @@ -31,7 +31,7 @@ import java.util.List; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class ClearScrollRequest extends ActionRequest { +public class ClearScrollRequest extends ActionRequest { private List scrollIds; diff --git a/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java index 08a1ec5b3de..b4f0f932eed 100644 --- a/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java @@ -36,7 +36,7 @@ import static 
org.elasticsearch.action.ValidateActions.addValidationError; /** * A multi search API request. */ -public class MultiSearchRequest extends ActionRequest implements CompositeIndicesRequest { +public class MultiSearchRequest extends ActionRequest implements CompositeIndicesRequest { private int maxConcurrentSearchRequests = 0; private List requests = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 58b2a7d4aa8..6312d051656 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -30,10 +30,8 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -72,7 +70,7 @@ import java.util.stream.StreamSupport; public class SearchPhaseController extends AbstractComponent { - public static final Comparator> QUERY_RESULT_ORDERING = (o1, o2) -> { + private static final Comparator> QUERY_RESULT_ORDERING = (o1, o2) -> { int i = o1.value.shardTarget().index().compareTo(o2.value.shardTarget().index()); if (i == 0) { i = o1.value.shardTarget().shardId().id() - o2.value.shardTarget().shardId().id(); @@ -80,17 +78,15 @@ public class SearchPhaseController extends AbstractComponent { return i; }; - public static final ScoreDoc[] EMPTY_DOCS = new ScoreDoc[0]; + private static final ScoreDoc[] EMPTY_DOCS = new ScoreDoc[0]; private final BigArrays 
bigArrays; private final ScriptService scriptService; - private final ClusterService clusterService; - SearchPhaseController(Settings settings, BigArrays bigArrays, ScriptService scriptService, ClusterService clusterService) { + SearchPhaseController(Settings settings, BigArrays bigArrays, ScriptService scriptService) { super(settings); this.bigArrays = bigArrays; this.scriptService = scriptService; - this.clusterService = clusterService; } public AggregatedDfs aggregateDfs(AtomicArray results) { @@ -486,7 +482,7 @@ public class SearchPhaseController extends AbstractComponent { for (AtomicArray.Entry entry : queryResults) { aggregationsList.add((InternalAggregations) entry.value.queryResult().aggregations()); } - ReduceContext reduceContext = new ReduceContext(bigArrays, scriptService, clusterService.state()); + ReduceContext reduceContext = new ReduceContext(bigArrays, scriptService); aggregations = InternalAggregations.reduce(aggregationsList, reduceContext); List pipelineAggregators = firstResult.pipelineAggregators(); if (pipelineAggregators != null) { diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java index de27805b139..ae960dae984 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -49,7 +49,7 @@ import java.util.Objects; * @see org.elasticsearch.client.Client#search(SearchRequest) * @see SearchResponse */ -public final class SearchRequest extends ActionRequest implements IndicesRequest.Replaceable { +public final class SearchRequest extends ActionRequest implements IndicesRequest.Replaceable { private SearchType searchType = SearchType.DEFAULT; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java index 8a171e24a1e..317efe40314 100644 
--- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java @@ -33,7 +33,7 @@ import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class SearchScrollRequest extends ActionRequest { +public class SearchScrollRequest extends ActionRequest { private String scrollId; private Scroll scroll; diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 022519d3d9d..1b818d86eac 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -69,7 +69,7 @@ public class TransportSearchAction extends HandledTransportAction listener) { throw new UnsupportedOperationException("the task parameter is required"); diff --git a/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java b/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java index f536d9e0ceb..4a2c88f75dc 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java @@ -40,7 +40,7 @@ public interface ActionFilter { * Enables filtering the execution of an action on the request side, either by sending a response through the * {@link ActionListener} or by continuing the execution through the given {@link ActionFilterChain chain} */ - , Response extends ActionResponse> void apply(Task task, String action, Request request, + void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain); /** @@ -62,7 +62,7 @@ public interface ActionFilter { } @Override - public final , Response extends ActionResponse> void apply(Task task, String action, Request request, + public final void 
apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { if (apply(action, request, listener)) { chain.proceed(task, action, request, listener); @@ -73,7 +73,7 @@ public interface ActionFilter { * Applies this filter and returns {@code true} if the execution chain should proceed, or {@code false} * if it should be aborted since the filter already handled the request and called the given listener. */ - protected abstract boolean apply(String action, ActionRequest request, ActionListener listener); + protected abstract boolean apply(String action, ActionRequest request, ActionListener listener); @Override public final void apply(String action, Response response, ActionListener listener, diff --git a/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java b/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java index 54f55e187a9..29991451f2e 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java +++ b/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java @@ -27,7 +27,7 @@ import org.elasticsearch.tasks.Task; /** * A filter chain allowing to continue and process the transport action request */ -public interface ActionFilterChain, Response extends ActionResponse> { +public interface ActionFilterChain { /** * Continue processing the request. 
Should only be called if a response has not been sent through diff --git a/core/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java b/core/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java index 7217961d899..98481eccfbb 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java +++ b/core/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.ClusterServiceState; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -69,14 +70,14 @@ public class ActiveShardsObserver extends AbstractComponent { } final ClusterStateObserver observer = new ClusterStateObserver(clusterService, logger, threadPool.getThreadContext()); - if (activeShardCount.enoughShardsActive(observer.observedState(), indexName)) { + if (activeShardCount.enoughShardsActive(observer.observedState().getClusterState(), indexName)) { onResult.accept(true); } else { final ClusterStateObserver.ChangePredicate shardsAllocatedPredicate = new ClusterStateObserver.ValidationPredicate() { @Override - protected boolean validate(final ClusterState newState) { - return activeShardCount.enoughShardsActive(newState, indexName); + protected boolean validate(final ClusterServiceState newState) { + return activeShardCount.enoughShardsActive(newState.getClusterState(), indexName); } }; diff --git a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index 0a53b63b662..68b699cb110 100644 --- 
a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -35,7 +35,7 @@ import java.util.function.Supplier; /** * A TransportAction that self registers a handler into the transport service */ -public abstract class HandledTransportAction, Response extends ActionResponse> +public abstract class HandledTransportAction extends TransportAction { protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, diff --git a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java index b348d2ec317..dbd08aa376f 100644 --- a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -38,7 +38,7 @@ import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.action.support.PlainActionFuture.newFuture; -public abstract class TransportAction, Response extends ActionResponse> extends AbstractComponent { +public abstract class TransportAction extends AbstractComponent { protected final ThreadPool threadPool; protected final String actionName; @@ -148,7 +148,7 @@ public abstract class TransportAction, Re protected abstract void doExecute(Request request, ActionListener listener); - private static class RequestFilterChain, Response extends ActionResponse> + private static class RequestFilterChain implements ActionFilterChain { private final TransportAction action; @@ -184,7 +184,7 @@ public abstract class TransportAction, Re } } - private static class ResponseFilterChain, Response extends ActionResponse> + private static class ResponseFilterChain implements ActionFilterChain { private final ActionFilter[] filters; 
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java index b6ab85c0b18..a04d2edc8dc 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -public class BroadcastRequest> extends ActionRequest implements IndicesRequest.Replaceable { +public class BroadcastRequest> extends ActionRequest implements IndicesRequest.Replaceable { protected String[] indices; private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed(); diff --git a/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java b/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index efbcadf445f..6f2ce6c4ef9 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -29,7 +29,7 @@ import java.io.IOException; /** * A based request for master based operation. 
*/ -public abstract class MasterNodeRequest> extends ActionRequest { +public abstract class MasterNodeRequest> extends ActionRequest { public static final TimeValue DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index a664c325a4b..47c3f6cf8e0 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.ClusterServiceState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; @@ -112,8 +113,8 @@ public abstract class TransportMasterNodeAction> extends ActionRequest { +public abstract class BaseNodesRequest> extends ActionRequest { /** * the list of nodesIds that will be used to resolve this request and {@link #concreteNodes} diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 3582f5f5aaf..6cc063d5af1 100644 --- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -106,17 +106,18 @@ public abstract class TransportNodesAction responses = new ArrayList<>(); final List failures = new ArrayList<>(); + final boolean accumulateExceptions = accumulateExceptions(); for (int i = 0; i < 
nodesResponses.length(); ++i) { Object response = nodesResponses.get(i); - if (nodeResponseClass.isInstance(response)) { - responses.add(nodeResponseClass.cast(response)); - } else if (response instanceof FailedNodeException) { - failures.add((FailedNodeException)response); + if (response instanceof FailedNodeException) { + if (accumulateExceptions) { + failures.add((FailedNodeException)response); + } else { + logger.warn("not accumulating exceptions, excluding exception from response", (FailedNodeException)response); + } } else { - logger.warn("ignoring unexpected response [{}] of type [{}], expected [{}] or [{}]", - response, response != null ? response.getClass().getName() : null, - nodeResponseClass.getSimpleName(), FailedNodeException.class.getSimpleName()); + responses.add(nodeResponseClass.cast(response)); } } @@ -243,9 +244,7 @@ public abstract class TransportNodesAction) () -> new ParameterizedMessage("failed to execute on node [{}]", nodeId), t); } - if (accumulateExceptions()) { - responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t)); - } + responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t)); if (counter.incrementAndGet() == responses.length()) { finishHim(); } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index 596d2581a79..9586bab3937 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -43,7 +43,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * Requests that are run on a particular replica, first on the primary and then on the replicas like {@link IndexRequest} or * {@link TransportShardRefreshAction}. 
*/ -public abstract class ReplicationRequest> extends ActionRequest +public abstract class ReplicationRequest> extends ActionRequest implements IndicesRequest { public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES); diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 6c19e526427..c44b9e73e2b 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -622,7 +622,7 @@ public abstract class TransportReplicationAction< @Override protected void doRun() { setPhase(task, "routing"); - final ClusterState state = observer.observedState(); + final ClusterState state = observer.observedState().getClusterState(); if (handleBlockExceptions(state)) { return; } diff --git a/core/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequest.java b/core/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequest.java index a6bb0f8e0a1..791617231b5 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequest.java @@ -32,7 +32,7 @@ import org.elasticsearch.index.shard.ShardId; import java.io.IOException; import java.util.concurrent.TimeUnit; -public abstract class InstanceShardOperationRequest> extends ActionRequest +public abstract class InstanceShardOperationRequest> extends ActionRequest implements IndicesRequest { public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES); diff --git a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java 
b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 35d3ee111c3..c5014adf570 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -124,9 +124,10 @@ public abstract class TransportInstanceSingleOperationAction> extends ActionRequest implements IndicesRequest { +public abstract class SingleShardRequest> extends ActionRequest implements IndicesRequest { public static final IndicesOptions INDICES_OPTIONS = IndicesOptions.strictSingleIndexNoExpandForbidClosed(); diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java index 18eea411b63..e912eebb4fb 100644 --- a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java @@ -36,7 +36,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A base class for task requests */ -public class BaseTasksRequest> extends ActionRequest { +public class BaseTasksRequest> extends ActionRequest { public static final String[] ALL_ACTIONS = Strings.EMPTY_ARRAY; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java index 3cd73226e73..da9dae6759d 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java @@ -41,7 +41,7 @@ import java.util.Iterator; import java.util.List; import java.util.Set; -public class MultiTermVectorsRequest extends ActionRequest implements Iterable, 
CompositeIndicesRequest, RealtimeRequest { +public class MultiTermVectorsRequest extends ActionRequest implements Iterable, CompositeIndicesRequest, RealtimeRequest { String preference; List requests = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 56d964362a7..0e37b6ff064 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -116,7 +116,7 @@ public class UpdateHelper extends AbstractComponent { if (!"create".equals(scriptOpChoice)) { if (!"none".equals(scriptOpChoice)) { logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice, - request.script.getScript()); + request.script.getIdOrCode()); } UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); @@ -242,7 +242,7 @@ public class UpdateHelper extends AbstractComponent { update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); } else { - logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript()); + logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getIdOrCode()); UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); } @@ -251,7 +251,7 @@ public class UpdateHelper extends AbstractComponent { private Map executeScript(Script script, Map ctx) { try { if 
(scriptService != null) { - ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.UPDATE, Collections.emptyMap()); + ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.UPDATE); executableScript.setNextVar("ctx", ctx); executableScript.run(); // we need to unwrap the ctx... diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 54c435da366..f59fd142e71 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -224,7 +224,7 @@ public class UpdateRequest extends InstanceShardOperationRequest */ @Deprecated public String scriptString() { - return this.script == null ? null : this.script.getScript(); + return this.script == null ? null : this.script.getIdOrCode(); } /** @@ -327,13 +327,13 @@ public class UpdateRequest extends InstanceShardOperationRequest private void updateOrCreateScript(String scriptContent, ScriptType type, String lang, Map params) { Script script = script(); if (script == null) { - script = new Script(scriptContent == null ? "" : scriptContent, type == null ? ScriptType.INLINE : type, lang, params); + script = new Script(type == null ? ScriptType.INLINE : type, lang, scriptContent == null ? "" : scriptContent, params); } else { - String newScriptContent = scriptContent == null ? script.getScript() : scriptContent; + String newScriptContent = scriptContent == null ? script.getIdOrCode() : scriptContent; ScriptType newScriptType = type == null ? script.getType() : type; String newScriptLang = lang == null ? script.getLang() : lang; Map newScriptParams = params == null ? 
script.getParams() : params; - script = new Script(newScriptContent, newScriptType, newScriptLang, newScriptParams); + script = new Script(newScriptType, newScriptLang, newScriptContent, newScriptParams); } script(script); } @@ -347,7 +347,7 @@ public class UpdateRequest extends InstanceShardOperationRequest */ @Deprecated public UpdateRequest script(String script, ScriptType scriptType, @Nullable Map scriptParams) { - this.script = new Script(script, scriptType, null, scriptParams); + this.script = new Script(scriptType, Script.DEFAULT_SCRIPT_LANG, script, scriptParams); return this; } @@ -370,7 +370,7 @@ public class UpdateRequest extends InstanceShardOperationRequest @Deprecated public UpdateRequest script(String script, @Nullable String scriptLang, ScriptType scriptType, @Nullable Map scriptParams) { - this.script = new Script(script, scriptType, scriptLang, scriptParams); + this.script = new Script(scriptType, scriptLang, script, scriptParams); return this; } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 022f7a3c4de..2366f68847d 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -19,9 +19,12 @@ package org.elasticsearch.bootstrap; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.ConsoleAppender; +import org.apache.logging.log4j.core.config.Configurator; import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.StringHelper; @@ -167,6 +170,8 @@ final class Bootstrap { public void run() { try { IOUtils.close(node); + LoggerContext context = (LoggerContext) LogManager.getContext(false); + Configurator.shutdown(context); } catch (IOException 
ex) { throw new ElasticsearchException("failed to stop node", ex); } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java index 308f02015b3..06c334fd42a 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java @@ -48,11 +48,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; /** - * We enforce limits once any network host is configured. In this case we assume the node is running in production - * and all production limit checks must pass. This should be extended as we go to settings like: - * - discovery.zen.ping.unicast.hosts is set if we use zen disco - * - ensure we can write in all data directories - * - fail if the default cluster.name is used, if this is setup on network a real clustername should be used? + * We enforce bootstrap checks once a node has the transport protocol bound to a non-loopback interface. In this case we assume the node is + * running in production and all bootstrap checks must pass. */ final class BootstrapCheck { @@ -60,8 +57,7 @@ final class BootstrapCheck { } /** - * checks the current limits against the snapshot or release build - * checks + * Executes the bootstrap checks if the node has the transport protocol bound to a non-loopback interface. * * @param settings the current node settings * @param boundTransportAddress the node network bindings @@ -74,15 +70,12 @@ final class BootstrapCheck { } /** - * executes the provided checks and fails the node if - * enforceLimits is true, otherwise logs warnings + * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. 
* - * @param enforceLimits true if the checks should be enforced or - * otherwise warned - * @param checks the checks to execute - * @param nodeName the node name to be used as a logging prefix + * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned + * @param checks the checks to execute + * @param nodeName the node name to be used as a logging prefix */ - // visible for testing static void check( final boolean enforceLimits, final List checks, @@ -91,13 +84,11 @@ final class BootstrapCheck { } /** - * executes the provided checks and fails the node if - * enforceLimits is true, otherwise logs warnings + * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. * - * @param enforceLimits true if the checks should be enforced or - * otherwise warned - * @param checks the checks to execute - * @param logger the logger to + * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned + * @param checks the checks to execute + * @param logger the logger to */ static void check( final boolean enforceLimits, @@ -140,12 +131,11 @@ final class BootstrapCheck { } /** - * Tests if the checks should be enforced + * Tests if the checks should be enforced. * * @param boundTransportAddress the node network bindings - * @return true if the checks should be enforced + * @return {@code true} if the checks should be enforced */ - // visible for testing static boolean enforceLimits(BoundTransportAddress boundTransportAddress) { Predicate isLoopbackOrLinkLocalAddress = t -> t.address().getAddress().isLinkLocalAddress() || t.address().getAddress().isLoopbackAddress(); @@ -179,19 +169,19 @@ final class BootstrapCheck { } /** - * Encapsulates a limit check + * Encapsulates a bootstrap check. */ interface Check { /** - * test if the node fails the check + * Test if the node fails the check. 
* - * @return true if the node failed the check + * @return {@code true} if the node failed the check */ boolean check(); /** - * the message for a failed check + * The error message for a failed check. * * @return the error message on check failure */ @@ -271,7 +261,7 @@ final class BootstrapCheck { public final String errorMessage() { return String.format( Locale.ROOT, - "max file descriptors [%d] for elasticsearch process likely too low, increase to at least [%d]", + "max file descriptors [%d] for elasticsearch process is too low, increase to at least [%d]", getMaxFileDescriptorCount(), limit ); @@ -323,7 +313,7 @@ final class BootstrapCheck { public String errorMessage() { return String.format( Locale.ROOT, - "max number of threads [%d] for user [%s] likely too low, increase to at least [%d]", + "max number of threads [%d] for user [%s] is too low, increase to at least [%d]", getMaxNumberOfThreads(), BootstrapInfo.getSystemProperties().get("user.name"), maxNumberOfThreadsThreshold); @@ -347,7 +337,7 @@ final class BootstrapCheck { public String errorMessage() { return String.format( Locale.ROOT, - "max size virtual memory [%d] for user [%s] likely too low, increase to [unlimited]", + "max size virtual memory [%d] for user [%s] is too low, increase to [unlimited]", getMaxSizeVirtualMemory(), BootstrapInfo.getSystemProperties().get("user.name")); } @@ -377,7 +367,7 @@ final class BootstrapCheck { public String errorMessage() { return String.format( Locale.ROOT, - "max virtual memory areas vm.max_map_count [%d] likely too low, increase to at least [%d]", + "max virtual memory areas vm.max_map_count [%d] is too low, increase to at least [%d]", getMaxMapCount(), limit); } @@ -561,12 +551,14 @@ final class BootstrapCheck { public boolean check() { if ("Oracle Corporation".equals(jvmVendor()) && isJava8() && isG1GCEnabled()) { final String jvmVersion = jvmVersion(); + // HotSpot versions on Java 8 match this regular expression; note that this changes with Java 9 after 
JEP-223 final Pattern pattern = Pattern.compile("(\\d+)\\.(\\d+)-b\\d+"); final Matcher matcher = pattern.matcher(jvmVersion); final boolean matches = matcher.matches(); assert matches : jvmVersion; final int major = Integer.parseInt(matcher.group(1)); final int update = Integer.parseInt(matcher.group(2)); + // HotSpot versions for Java 8 have major version 25, the bad versions are all versions prior to update 40 return major == 25 && update < 40; } else { return false; @@ -590,9 +582,10 @@ final class BootstrapCheck { return Constants.JVM_VERSION; } - // visible for tests + // visible for testing boolean isJava8() { - return Constants.JVM_SPEC_VERSION.equals("1.8"); + assert "Oracle Corporation".equals(jvmVendor()); + return JavaVersion.current().equals(JavaVersion.parse("1.8")); } @Override diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index e45e42757c2..2b8fd325d81 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -20,10 +20,10 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.SecureSM; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.http.HttpTransportSettings; @@ -266,12 +266,14 @@ final class Security { } } - static void addBindPermissions(Permissions policy, Settings settings) throws IOException { - // http is simple - String httpRange = HttpTransportSettings.SETTING_HTTP_PORT.get(settings).getPortRangeString(); - // listen is always called with 'localhost' but use wildcard to be sure, no name service is consulted. 
- // see SocketPermission implies() code - policy.add(new SocketPermission("*:" + httpRange, "listen,resolve")); + /** + * Add dynamic {@link SocketPermission}s based on HTTP and transport settings. + * + * @param policy the {@link Permissions} instance to apply the dynamic {@link SocketPermission}s to. + * @param settings the {@link Settings} instance to read the HTTP and transport settings from + */ + static void addBindPermissions(Permissions policy, Settings settings) { + addSocketPermissionForHttp(policy, settings); // transport is waaaay overengineered Map profiles = TransportSettings.TRANSPORT_PROFILES_SETTING.get(settings).getAsGroups(); if (!profiles.containsKey(TransportSettings.DEFAULT_PROFILE)) { @@ -284,16 +286,76 @@ final class Security { for (Map.Entry entry : profiles.entrySet()) { Settings profileSettings = entry.getValue(); String name = entry.getKey(); - String transportRange = profileSettings.get("port", TransportSettings.PORT.get(settings)); // a profile is only valid if its the default profile, or if it has an actual name and specifies a port boolean valid = TransportSettings.DEFAULT_PROFILE.equals(name) || (Strings.hasLength(name) && profileSettings.get("port") != null); if (valid) { - // listen is always called with 'localhost' but use wildcard to be sure, no name service is consulted. 
- // see SocketPermission implies() code - policy.add(new SocketPermission("*:" + transportRange, "listen,resolve")); + addSocketPermissionForTransportProfile(policy, profileSettings, settings); } } + + for (final Settings tribeNodeSettings : settings.getGroups("tribe", true).values()) { + // tribe nodes have HTTP disabled by default, so we check if HTTP is enabled before granting + if (NetworkModule.HTTP_ENABLED.exists(tribeNodeSettings) && NetworkModule.HTTP_ENABLED.get(tribeNodeSettings)) { + addSocketPermissionForHttp(policy, tribeNodeSettings); + } + addSocketPermissionForTransport(policy, tribeNodeSettings); + } + } + + /** + * Add dynamic {@link SocketPermission} based on HTTP settings. + * + * @param policy the {@link Permissions} instance to apply the dynamic {@link SocketPermission}s to. + * @param settings the {@link Settings} instance to read the HTTP settingsfrom + */ + private static void addSocketPermissionForHttp(final Permissions policy, final Settings settings) { + // http is simple + final String httpRange = HttpTransportSettings.SETTING_HTTP_PORT.get(settings).getPortRangeString(); + addSocketPermissionForPortRange(policy, httpRange); + } + + /** + * Add dynamic {@link SocketPermission} based on transport settings. This method will first check if there is a port range specified in + * the transport profile specified by {@code profileSettings} and will fall back to {@code settings}. 
+ * + * @param policy the {@link Permissions} instance to apply the dynamic {@link SocketPermission}s to + * @param profileSettings the {@link Settings} to read the transport profile from + * @param settings the {@link Settings} instance to read the transport settings from + */ + private static void addSocketPermissionForTransportProfile( + final Permissions policy, + final Settings profileSettings, + final Settings settings) { + final String transportRange = profileSettings.get("port"); + if (transportRange != null) { + addSocketPermissionForPortRange(policy, transportRange); + } else { + addSocketPermissionForTransport(policy, settings); + } + } + + /** + * Add dynamic {@link SocketPermission} based on transport settings. + * + * @param policy the {@link Permissions} instance to apply the dynamic {@link SocketPermission}s to + * @param settings the {@link Settings} instance to read the transport settings from + */ + private static void addSocketPermissionForTransport(final Permissions policy, final Settings settings) { + final String transportRange = TransportSettings.PORT.get(settings); + addSocketPermissionForPortRange(policy, transportRange); + } + + /** + * Add dynamic {@link SocketPermission} for the specified port range. + * + * @param policy the {@link Permissions} instance to apply the dynamic {@link SocketPermission} to. + * @param portRange the port range + */ + private static void addSocketPermissionForPortRange(final Permissions policy, final String portRange) { + // listen is always called with 'localhost' but use wildcard to be sure, no name service is consulted. 
+ // see SocketPermission implies() code + policy.add(new SocketPermission("*:" + portRange, "listen,resolve")); } /** diff --git a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java index d9ddc56d48a..84438ff6d1a 100644 --- a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java +++ b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java @@ -40,7 +40,7 @@ public interface ElasticsearchClient { * @param The request builder type. * @return A future allowing to get back the response. */ - , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> ActionFuture execute( + > ActionFuture execute( final Action action, final Request request); /** @@ -53,7 +53,7 @@ public interface ElasticsearchClient { * @param The response type. * @param The request builder type. */ - , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( + > void execute( final Action action, final Request request, ActionListener listener); /** @@ -65,7 +65,7 @@ public interface ElasticsearchClient { * @param The request builder. * @return The request builder, that can, at a later stage, execute the request. 
*/ - , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> RequestBuilder prepareExecute( + > RequestBuilder prepareExecute( final Action action); /** diff --git a/core/src/main/java/org/elasticsearch/client/FilterClient.java b/core/src/main/java/org/elasticsearch/client/FilterClient.java index d0f52282c76..23d3c2c3d0c 100644 --- a/core/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/core/src/main/java/org/elasticsearch/client/FilterClient.java @@ -62,7 +62,7 @@ public abstract class FilterClient extends AbstractClient { } @Override - protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + protected > void doExecute( Action action, Request request, ActionListener listener) { in().execute(action, request, listener); } diff --git a/core/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java b/core/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java index 44ba2b76e43..62843c41b70 100644 --- a/core/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java +++ b/core/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java @@ -58,7 +58,7 @@ public class ParentTaskAssigningClient extends FilterClient { } @Override - protected < Request extends ActionRequest, + protected < Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder > void doExecute(Action action, Request request, ActionListener listener) { diff --git a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java index 5fc2319284d..6c3aa071ba3 100644 --- a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -56,7 +56,7 @@ public class NodeClient extends AbstractClient { } @Override - public < Request extends ActionRequest, + public < Request extends 
ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder > void doExecute(Action action, Request request, ActionListener listener) { @@ -69,7 +69,7 @@ public class NodeClient extends AbstractClient { * method if you don't need access to the task when listening for the response. This is the method used to implement the {@link Client} * interface. */ - public < Request extends ActionRequest, + public < Request extends ActionRequest, Response extends ActionResponse > Task executeLocally(GenericAction action, Request request, ActionListener listener) { return transportAction(action).execute(request, listener); @@ -79,7 +79,7 @@ public class NodeClient extends AbstractClient { * Execute an {@link Action} locally, returning that {@link Task} used to track it, and linking an {@link TaskListener}. Prefer this * method if you need access to the task when listening for the response. */ - public < Request extends ActionRequest, + public < Request extends ActionRequest, Response extends ActionResponse > Task executeLocally(GenericAction action, Request request, TaskListener listener) { return transportAction(action).execute(request, listener); @@ -89,7 +89,7 @@ public class NodeClient extends AbstractClient { * Get the {@link TransportAction} for an {@link Action}, throwing exceptions if the action isn't available. 
*/ @SuppressWarnings("unchecked") - private < Request extends ActionRequest, + private < Request extends ActionRequest, Response extends ActionResponse > TransportAction transportAction(GenericAction action) { if (actions == null) { diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 006040b8e16..075fbf1fad6 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -377,13 +377,13 @@ public abstract class AbstractClient extends AbstractComponent implements Client } @Override - public final , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> RequestBuilder prepareExecute( + public final > RequestBuilder prepareExecute( final Action action) { return action.newRequestBuilder(this); } @Override - public final , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> ActionFuture execute( + public final > ActionFuture execute( Action action, Request request) { PlainActionFuture actionFuture = PlainActionFuture.newFuture(); execute(action, request, actionFuture); @@ -394,13 +394,13 @@ public abstract class AbstractClient extends AbstractComponent implements Client * This is the single execution point of *all* clients. 
*/ @Override - public final , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( + public final > void execute( Action action, Request request, ActionListener listener) { listener = threadedWrapper.wrap(listener); doExecute(action, request, listener); } - protected abstract , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute(final Action action, final Request request, ActionListener listener); + protected abstract > void doExecute(final Action action, final Request request, ActionListener listener); @Override public ActionFuture index(final IndexRequest request) { @@ -696,19 +696,19 @@ public abstract class AbstractClient extends AbstractComponent implements Client } @Override - public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> ActionFuture execute( + public > ActionFuture execute( Action action, Request request) { return client.execute(action, request); } @Override - public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( + public > void execute( Action action, Request request, ActionListener listener) { client.execute(action, request, listener); } @Override - public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> RequestBuilder prepareExecute( + public > RequestBuilder prepareExecute( Action action) { return client.prepareExecute(action); } @@ -1212,19 +1212,19 @@ public abstract class AbstractClient extends AbstractComponent implements Client } @Override - public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> ActionFuture execute( + public > ActionFuture execute( Action action, Request request) { return client.execute(action, request); } @Override - public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( + public > void execute( Action action, Request request, ActionListener listener) { 
client.execute(action, request, listener); } @Override - public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> RequestBuilder prepareExecute( + public > RequestBuilder prepareExecute( Action action) { return client.prepareExecute(action); } @@ -1745,7 +1745,7 @@ public abstract class AbstractClient extends AbstractComponent implements Client public Client filterWithHeader(Map headers) { return new FilterClient(this) { @Override - protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute(Action action, Request request, ActionListener listener) { + protected > void doExecute(Action action, Request request, ActionListener listener) { ThreadContext threadContext = threadPool().getThreadContext(); try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(headers)) { super.doExecute(action, request, listener); diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 7ef7f400a53..673693c7c38 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -329,7 +329,7 @@ public abstract class TransportClient extends AbstractClient { } @Override - protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute(Action action, Request request, ActionListener listener) { + protected > void doExecute(Action action, Request request, ActionListener listener) { proxy.execute(action, request, listener); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 9bc55054a1d..930991c443b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ 
-86,9 +86,6 @@ public class ClusterModule extends AbstractModule { final Collection allocationDeciders; final ShardsAllocator shardsAllocator; - // pkg private so tests can mock - Class clusterInfoServiceImpl = InternalClusterInfoService.class; - public ClusterModule(Settings settings, ClusterService clusterService, List clusterPlugins) { this.settings = settings; this.allocationDeciders = createAllocationDeciders(settings, clusterService.getClusterSettings(), clusterPlugins); @@ -159,7 +156,6 @@ public class ClusterModule extends AbstractModule { @Override protected void configure() { - bind(ClusterInfoService.class).to(clusterInfoServiceImpl).asEagerSingleton(); bind(GatewayAllocator.class).asEagerSingleton(); bind(AllocationService.class).asEagerSingleton(); bind(ClusterService.class).toInstance(clusterService); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index 82fb6476264..7699e6fff87 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -64,10 +64,9 @@ import java.util.Set; /** * Represents the current state of the cluster. *

- * The cluster state object is immutable with an - * exception of the {@link RoutingNodes} structure, which is built on demand from the {@link RoutingTable}, - * and cluster state {@link #status}, which is updated during cluster state publishing and applying - * processing. The cluster state can be updated only on the master node. All updates are performed by on a + * The cluster state object is immutable with an exception of the {@link RoutingNodes} structure, which is + * built on demand from the {@link RoutingTable}. + * The cluster state can be updated only on the master node. All updates are performed by on a * single thread and controlled by the {@link ClusterService}. After every update the * {@link Discovery#publish} method publishes new version of the cluster state to all other nodes in the * cluster. The actual publishing mechanism is delegated to the {@link Discovery#publish} method and depends on @@ -89,23 +88,6 @@ public class ClusterState implements ToXContent, Diffable { public static final ClusterState PROTO = builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).build(); - public static enum ClusterStateStatus { - UNKNOWN((byte) 0), - RECEIVED((byte) 1), - BEING_APPLIED((byte) 2), - APPLIED((byte) 3); - - private final byte id; - - ClusterStateStatus(byte id) { - this.id = id; - } - - public byte id() { - return this.id; - } - } - public interface Custom extends Diffable, ToXContent { String type(); @@ -166,8 +148,6 @@ public class ClusterState implements ToXContent, Diffable { // built on demand private volatile RoutingNodes routingNodes; - private volatile ClusterStateStatus status; - public ClusterState(long version, String stateUUID, ClusterState state) { this(state.clusterName, version, stateUUID, state.metaData(), state.routingTable(), state.nodes(), state.blocks(), state.customs(), false); } @@ -181,19 +161,9 @@ public class ClusterState implements ToXContent, Diffable { this.nodes = nodes; this.blocks = blocks; this.customs 
= customs; - this.status = ClusterStateStatus.UNKNOWN; this.wasReadFromDiff = wasReadFromDiff; } - public ClusterStateStatus status() { - return status; - } - - public ClusterState status(ClusterStateStatus newStatus) { - this.status = newStatus; - return this; - } - public long version() { return this.version; } @@ -425,7 +395,7 @@ public class ClusterState implements ToXContent, Diffable { IndexTemplateMetaData templateMetaData = cursor.value; builder.startObject(templateMetaData.name()); - builder.field("template", templateMetaData.template()); + builder.field("index_patterns", templateMetaData.patterns()); builder.field("order", templateMetaData.order()); builder.startObject("settings"); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java index e18ec5543d9..17dec4cf504 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java @@ -22,6 +22,8 @@ package org.elasticsearch.cluster; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.ClusterStateStatus; +import org.elasticsearch.cluster.service.ClusterServiceState; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -50,7 +52,7 @@ public class ClusterStateObserver { volatile TimeValue timeOutValue; - final AtomicReference lastObservedState; + final AtomicReference lastObservedState; final TimeoutClusterStateListener clusterStateListener = new ObserverClusterStateListener(); // observingContext is not null when waiting on cluster state changes final AtomicReference observingContext = new AtomicReference<>(null); @@ -69,7 +71,7 @@ public class ClusterStateObserver { */ public 
ClusterStateObserver(ClusterService clusterService, @Nullable TimeValue timeout, Logger logger, ThreadContext contextHolder) { this.clusterService = clusterService; - this.lastObservedState = new AtomicReference<>(new ObservedState(clusterService.state())); + this.lastObservedState = new AtomicReference<>(clusterService.clusterServiceState()); this.timeOutValue = timeout; if (timeOutValue != null) { this.startTimeNS = System.nanoTime(); @@ -78,11 +80,11 @@ public class ClusterStateObserver { this.contextHolder = contextHolder; } - /** last cluster state observer by this observer. Note that this may not be the current one */ - public ClusterState observedState() { - ObservedState state = lastObservedState.get(); + /** last cluster state and status observed by this observer. Note that this may not be the current one */ + public ClusterServiceState observedState() { + ClusterServiceState state = lastObservedState.get(); assert state != null; - return state.clusterState; + return state; } /** indicates whether this observer has timedout */ @@ -126,7 +128,7 @@ public class ClusterStateObserver { logger.trace("observer timed out. notifying listener. 
timeout setting [{}], time since start [{}]", timeOutValue, new TimeValue(timeSinceStartMS)); // update to latest, in case people want to retry timedOut = true; - lastObservedState.set(new ObservedState(clusterService.state())); + lastObservedState.set(clusterService.clusterServiceState()); listener.onTimeout(timeOutValue); return; } @@ -141,13 +143,13 @@ public class ClusterStateObserver { } // sample a new state - ObservedState newState = new ObservedState(clusterService.state()); - ObservedState lastState = lastObservedState.get(); - if (changePredicate.apply(lastState.clusterState, lastState.status, newState.clusterState, newState.status)) { + ClusterServiceState newState = clusterService.clusterServiceState(); + ClusterServiceState lastState = lastObservedState.get(); + if (changePredicate.apply(lastState, newState)) { // good enough, let's go. logger.trace("observer: sampled state accepted by predicate ({})", newState); lastObservedState.set(newState); - listener.onNewClusterState(newState.clusterState); + listener.onNewClusterState(newState.getClusterState()); } else { logger.trace("observer: sampled state rejected by predicate ({}). adding listener to ClusterService", newState); ObservingContext context = new ObservingContext(new ContextPreservingListener(listener, contextHolder.newStoredContext()), changePredicate); @@ -161,11 +163,11 @@ public class ClusterStateObserver { /** * reset this observer to the give cluster state. Any pending waits will be canceled. 
*/ - public void reset(ClusterState toState) { + public void reset(ClusterServiceState state) { if (observingContext.getAndSet(null) != null) { clusterService.remove(clusterStateListener); } - lastObservedState.set(new ObservedState(toState)); + lastObservedState.set(state); } class ObserverClusterStateListener implements TimeoutClusterStateListener { @@ -180,10 +182,10 @@ public class ClusterStateObserver { if (context.changePredicate.apply(event)) { if (observingContext.compareAndSet(context, null)) { clusterService.remove(this); - ObservedState state = new ObservedState(event.state()); + ClusterServiceState state = new ClusterServiceState(event.state(), ClusterStateStatus.APPLIED); logger.trace("observer: accepting cluster state change ({})", state); lastObservedState.set(state); - context.listener.onNewClusterState(state.clusterState); + context.listener.onNewClusterState(state.getClusterState()); } else { logger.trace("observer: predicate approved change but observing context has changed - ignoring (new cluster state version [{}])", event.state().version()); } @@ -199,15 +201,15 @@ public class ClusterStateObserver { // No need to remove listener as it is the responsibility of the thread that set observingContext to null return; } - ObservedState newState = new ObservedState(clusterService.state()); - ObservedState lastState = lastObservedState.get(); - if (context.changePredicate.apply(lastState.clusterState, lastState.status, newState.clusterState, newState.status)) { + ClusterServiceState newState = clusterService.clusterServiceState(); + ClusterServiceState lastState = lastObservedState.get(); + if (context.changePredicate.apply(lastState, newState)) { // double check we're still listening if (observingContext.compareAndSet(context, null)) { logger.trace("observer: post adding listener: accepting current cluster state ({})", newState); clusterService.remove(this); lastObservedState.set(newState); - context.listener.onNewClusterState(newState.clusterState); 
+ context.listener.onNewClusterState(newState.getClusterState()); } else { logger.trace("observer: postAdded - predicate approved state but observing context has changed - ignoring ({})", newState); } @@ -235,7 +237,7 @@ public class ClusterStateObserver { long timeSinceStartMS = TimeValue.nsecToMSec(System.nanoTime() - startTimeNS); logger.trace("observer: timeout notification from cluster service. timeout setting [{}], time since start [{}]", timeOutValue, new TimeValue(timeSinceStartMS)); // update to latest, in case people want to retry - lastObservedState.set(new ObservedState(clusterService.state())); + lastObservedState.set(clusterService.clusterServiceState()); timedOut = true; context.listener.onTimeout(timeOutValue); } @@ -260,10 +262,8 @@ public class ClusterStateObserver { * * @return true if newState should be accepted */ - boolean apply(ClusterState previousState, - ClusterState.ClusterStateStatus previousStatus, - ClusterState newState, - ClusterState.ClusterStateStatus newStatus); + boolean apply(ClusterServiceState previousState, + ClusterServiceState newState); /** * called to see whether a cluster change should be accepted @@ -277,22 +277,25 @@ public class ClusterStateObserver { public abstract static class ValidationPredicate implements ChangePredicate { @Override - public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, ClusterState newState, ClusterState.ClusterStateStatus newStatus) { - return (previousState != newState || previousStatus != newStatus) && validate(newState); + public boolean apply(ClusterServiceState previousState, ClusterServiceState newState) { + return (previousState.getClusterState() != newState.getClusterState() || + previousState.getClusterStateStatus() != newState.getClusterStateStatus()) && + validate(newState); } - protected abstract boolean validate(ClusterState newState); + protected abstract boolean validate(ClusterServiceState newState); @Override public boolean 
apply(ClusterChangedEvent changedEvent) { - return changedEvent.previousState().version() != changedEvent.state().version() && validate(changedEvent.state()); + return changedEvent.previousState().version() != changedEvent.state().version() && + validate(new ClusterServiceState(changedEvent.state(), ClusterStateStatus.APPLIED)); } } public abstract static class EventPredicate implements ChangePredicate { @Override - public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, ClusterState newState, ClusterState.ClusterStateStatus newStatus) { - return previousState != newState || previousStatus != newStatus; + public boolean apply(ClusterServiceState previousState, ClusterServiceState newState) { + return previousState.getClusterState() != newState.getClusterState() || previousState.getClusterStateStatus() != newState.getClusterStateStatus(); } } @@ -307,21 +310,6 @@ public class ClusterStateObserver { } } - static class ObservedState { - public final ClusterState clusterState; - public final ClusterState.ClusterStateStatus status; - - public ObservedState(ClusterState clusterState) { - this.clusterState = clusterState; - this.status = clusterState.status(); - } - - @Override - public String toString() { - return "version [" + clusterState.version() + "], status [" + status + "]"; - } - } - private static final class ContextPreservingListener implements Listener { private final Listener delegate; private final ThreadContext.StoredContext tempContext; diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index b32e992c5aa..70656bb56bd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -19,17 +19,21 @@ package org.elasticsearch.cluster; +import java.util.List; +import 
java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; -import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -39,7 +43,6 @@ import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -50,11 +53,6 @@ import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ReceiveTimeoutTransportException; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - /** * InternalClusterInfoService provides the ClusterInfoService interface, * 
routinely updated on a timer. The timer can be dynamically changed by @@ -84,29 +82,24 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu private volatile boolean isMaster = false; private volatile boolean enabled; private volatile TimeValue fetchTimeout; - private final TransportNodesStatsAction transportNodesStatsAction; - private final TransportIndicesStatsAction transportIndicesStatsAction; private final ClusterService clusterService; private final ThreadPool threadPool; + private final NodeClient client; private final List listeners = new CopyOnWriteArrayList<>(); - @Inject - public InternalClusterInfoService(Settings settings, ClusterSettings clusterSettings, - TransportNodesStatsAction transportNodesStatsAction, - TransportIndicesStatsAction transportIndicesStatsAction, ClusterService clusterService, - ThreadPool threadPool) { + public InternalClusterInfoService(Settings settings, ClusterService clusterService, ThreadPool threadPool, NodeClient client) { super(settings); this.leastAvailableSpaceUsages = ImmutableOpenMap.of(); this.mostAvailableSpaceUsages = ImmutableOpenMap.of(); this.shardRoutingToDataPath = ImmutableOpenMap.of(); this.shardSizes = ImmutableOpenMap.of(); - this.transportNodesStatsAction = transportNodesStatsAction; - this.transportIndicesStatsAction = transportIndicesStatsAction; this.clusterService = clusterService; this.threadPool = threadPool; + this.client = client; this.updateFrequency = INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.get(settings); this.fetchTimeout = INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.get(settings); this.enabled = DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings); + ClusterSettings clusterSettings = clusterService.getClusterSettings(); clusterSettings.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING, this::setFetchTimeout); clusterSettings.addSettingsUpdateConsumer(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING, 
this::setUpdateFrequency); clusterSettings.addSettingsUpdateConsumer(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); @@ -259,8 +252,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu nodesStatsRequest.clear(); nodesStatsRequest.fs(true); nodesStatsRequest.timeout(fetchTimeout); - - transportNodesStatsAction.execute(nodesStatsRequest, new LatchedActionListener<>(listener, latch)); + client.admin().cluster().nodesStats(nodesStatsRequest, new LatchedActionListener<>(listener, latch)); return latch; } @@ -274,7 +266,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu indicesStatsRequest.clear(); indicesStatsRequest.store(true); - transportIndicesStatsAction.execute(indicesStatsRequest, new LatchedActionListener<>(listener, latch)); + client.admin().indices().stats(indicesStatsRequest, new LatchedActionListener<>(listener, latch)); return latch; } diff --git a/core/src/main/java/org/elasticsearch/cluster/MasterNodeChangePredicate.java b/core/src/main/java/org/elasticsearch/cluster/MasterNodeChangePredicate.java index afb557e5bbc..0ee5c891282 100644 --- a/core/src/main/java/org/elasticsearch/cluster/MasterNodeChangePredicate.java +++ b/core/src/main/java/org/elasticsearch/cluster/MasterNodeChangePredicate.java @@ -19,18 +19,19 @@ package org.elasticsearch.cluster; +import org.elasticsearch.cluster.service.ClusterServiceState; + public enum MasterNodeChangePredicate implements ClusterStateObserver.ChangePredicate { INSTANCE; @Override public boolean apply( - ClusterState previousState, - ClusterState.ClusterStateStatus previousStatus, - ClusterState newState, - ClusterState.ClusterStateStatus newStatus) { + ClusterServiceState previousState, + ClusterServiceState newState) { // checking if the masterNodeId changed is insufficient as the // same master node might get re-elected after a disruption - return newState.nodes().getMasterNodeId() != null && 
newState != previousState; + return newState.getClusterState().nodes().getMasterNodeId() != null && + newState.getClusterState() != previousState.getClusterState(); } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index ee56d7a61a1..d7964f0c429 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -92,7 +92,7 @@ public class ShardStateAction extends AbstractComponent { } private void sendShardAction(final String actionName, final ClusterStateObserver observer, final ShardEntry shardEntry, final Listener listener) { - DiscoveryNode masterNode = observer.observedState().nodes().getMasterNode(); + DiscoveryNode masterNode = observer.observedState().getClusterState().nodes().getMasterNode(); if (masterNode == null) { logger.warn("{} no master known for action [{}] for shard entry [{}]", shardEntry.shardId, actionName, shardEntry); waitForNewMasterAndRetry(actionName, observer, shardEntry, listener); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index cb46b22fe7e..9a2f3dc5526 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -99,10 +99,11 @@ public class AliasValidator extends AbstractComponent { } } - private void validateAliasStandalone(String alias, String indexRouting) { + void validateAliasStandalone(String alias, String indexRouting) { if (!Strings.hasText(alias)) { throw new IllegalArgumentException("alias name is required"); } + MetaDataCreateIndexService.validateIndexOrAliasName(alias, InvalidAliasNameException::new); if (indexRouting != null && 
indexRouting.indexOf(',') != -1) { throw new IllegalArgumentException("alias [" + alias + "] has several index routing values associated with it"); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index e719bac6188..faf0a880579 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -29,6 +29,8 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.util.set.Sets; @@ -38,6 +40,9 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -45,6 +50,7 @@ import java.util.Set; public class IndexTemplateMetaData extends AbstractDiffable { public static final IndexTemplateMetaData PROTO = IndexTemplateMetaData.builder("").build(); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(IndexTemplateMetaData.class)); private final String name; @@ -56,7 +62,7 @@ public class IndexTemplateMetaData extends AbstractDiffable * PUT /_template/my_template * { - * "template": "my_index-*", + * "index_patterns": ["my_index-*"], * "mappings": { ... 
}, * "version": 1 * } @@ -70,7 +76,7 @@ public class IndexTemplateMetaData extends AbstractDiffable patterns; private final Settings settings; @@ -82,14 +88,14 @@ public class IndexTemplateMetaData extends AbstractDiffable customs; public IndexTemplateMetaData(String name, int order, Integer version, - String template, Settings settings, + List patterns, Settings settings, ImmutableOpenMap mappings, ImmutableOpenMap aliases, ImmutableOpenMap customs) { this.name = name; this.order = order; this.version = version; - this.template = template; + this.patterns= patterns; this.settings = settings; this.mappings = mappings; this.aliases = aliases; @@ -122,12 +128,12 @@ public class IndexTemplateMetaData extends AbstractDiffable patterns() { + return this.patterns; } - public String getTemplate() { - return this.template; + public List getPatterns() { + return this.patterns; } public Settings settings() { @@ -182,7 +188,7 @@ public class IndexTemplateMetaData extends AbstractDiffable 0 ? patterns.get(0) : ""); + } Settings.writeSettingsToStream(settings, out); out.writeVInt(mappings.size()); for (ObjectObjectCursor cursor : mappings) { @@ -252,7 +266,7 @@ public class IndexTemplateMetaData extends AbstractDiffable VALID_FIELDS = Sets.newHashSet("template", "order", "mappings", "settings"); + private static final Set VALID_FIELDS = Sets.newHashSet("template", "order", "mappings", "settings", "index_patterns"); static { VALID_FIELDS.addAll(IndexMetaData.customPrototypes.keySet()); } @@ -263,7 +277,7 @@ public class IndexTemplateMetaData extends AbstractDiffable indexPatterns; private Settings settings = Settings.Builder.EMPTY_SETTINGS; @@ -284,7 +298,7 @@ public class IndexTemplateMetaData extends AbstractDiffable indexPatterns) { + this.indexPatterns = indexPatterns; return this; } - public String template() { - return template; - } public Builder settings(Settings.Builder settings) { this.settings = settings.build(); @@ -361,7 +372,8 @@ public class IndexTemplateMetaData 
extends AbstractDiffable index_patterns = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + index_patterns.add(parser.text()); + } + builder.patterns(index_patterns); } } else if (token.isValue()) { - if ("template".equals(currentFieldName)) { - builder.template(parser.text()); + // Prior to 5.1.0, elasticsearch only supported a single index pattern called `template` (#21009) + if("template".equals(currentFieldName)) { + DEPRECATION_LOGGER.deprecated("Deprecated field [template] used, replaced by [index_patterns]"); + builder.patterns(Collections.singletonList(parser.text())); } else if ("order".equals(currentFieldName)) { builder.order(parser.intValue()); } else if ("version".equals(currentFieldName)) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index f9034f6a29f..bef6da5ac4e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -88,6 +88,7 @@ import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiFunction; import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS; @@ -127,24 +128,37 @@ public class MetaDataCreateIndexService extends AbstractComponent { this.activeShardsObserver = new ActiveShardsObserver(settings, clusterService, threadPool); } + /** + * Validate the name for an index against some static rules and a cluster state. 
+ */ public static void validateIndexName(String index, ClusterState state) { + validateIndexOrAliasName(index, InvalidIndexNameException::new); + if (!index.toLowerCase(Locale.ROOT).equals(index)) { + throw new InvalidIndexNameException(index, "must be lowercase"); + } if (state.routingTable().hasIndex(index)) { throw new IndexAlreadyExistsException(state.routingTable().index(index).getIndex()); } if (state.metaData().hasIndex(index)) { throw new IndexAlreadyExistsException(state.metaData().index(index).getIndex()); } + if (state.metaData().hasAlias(index)) { + throw new InvalidIndexNameException(index, "already exists as alias"); + } + } + + /** + * Validate the name for an index or alias against some static rules. + */ + public static void validateIndexOrAliasName(String index, BiFunction exceptionCtor) { if (!Strings.validFileName(index)) { - throw new InvalidIndexNameException(index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); + throw exceptionCtor.apply(index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); } if (index.contains("#")) { - throw new InvalidIndexNameException(index, "must not contain '#'"); + throw exceptionCtor.apply(index, "must not contain '#'"); } if (index.charAt(0) == '_' || index.charAt(0) == '-' || index.charAt(0) == '+') { - throw new InvalidIndexNameException(index, "must not start with '_', '-', or '+'"); - } - if (!index.toLowerCase(Locale.ROOT).equals(index)) { - throw new InvalidIndexNameException(index, "must be lowercase"); + throw exceptionCtor.apply(index, "must not start with '_', '-', or '+'"); } int byteCount = 0; try { @@ -154,15 +168,10 @@ public class MetaDataCreateIndexService extends AbstractComponent { throw new ElasticsearchException("Unable to determine length of index name", e); } if (byteCount > MAX_INDEX_NAME_BYTES) { - throw new InvalidIndexNameException(index, - "index name is too long, (" + byteCount + - " > " + MAX_INDEX_NAME_BYTES + ")"); - } 
- if (state.metaData().hasAlias(index)) { - throw new InvalidIndexNameException(index, "already exists as alias"); + throw exceptionCtor.apply(index, "index name is too long, (" + byteCount + " > " + MAX_INDEX_NAME_BYTES + ")"); } if (index.equals(".") || index.equals("..")) { - throw new InvalidIndexNameException(index, "must not be '.' or '..'"); + throw exceptionCtor.apply(index, "must not be '.' or '..'"); } } @@ -455,21 +464,24 @@ public class MetaDataCreateIndexService extends AbstractComponent { } private List findTemplates(CreateIndexClusterStateUpdateRequest request, ClusterState state) throws IOException { - List templates = new ArrayList<>(); + List templateMetadata = new ArrayList<>(); for (ObjectCursor cursor : state.metaData().templates().values()) { - IndexTemplateMetaData template = cursor.value; - if (Regex.simpleMatch(template.template(), request.index())) { - templates.add(template); + IndexTemplateMetaData metadata = cursor.value; + for (String template: metadata.patterns()) { + if (Regex.simpleMatch(template, request.index())) { + templateMetadata.add(metadata); + break; + } } } - CollectionUtil.timSort(templates, new Comparator() { + CollectionUtil.timSort(templateMetadata, new Comparator() { @Override public int compare(IndexTemplateMetaData o1, IndexTemplateMetaData o2) { return o2.order() - o1.order(); } }); - return templates; + return templateMetadata; } private void validate(CreateIndexClusterStateUpdateRequest request, ClusterState state) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 746598826f9..56c35f9f03c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.Index; import 
org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.indices.IndexTemplateAlreadyExistsException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidIndexTemplateException; @@ -131,7 +130,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { listener.onFailure(new IllegalArgumentException("index_template must provide a name")); return; } - if (request.template == null) { + if (request.indexPatterns == null) { listener.onFailure(new IllegalArgumentException("index_template must provide a template")); return; } @@ -161,7 +160,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { @Override public ClusterState execute(ClusterState currentState) throws Exception { if (request.create && currentState.metaData().templates().containsKey(request.name)) { - throw new IndexTemplateAlreadyExistsException(request.name); + throw new IllegalArgumentException("index_template [" + request.name + "] already exists"); } validateAndAddTemplate(request, templateBuilder, indicesService); @@ -209,7 +208,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { templateBuilder.order(request.order); templateBuilder.version(request.version); - templateBuilder.template(request.template); + templateBuilder.patterns(request.indexPatterns); templateBuilder.settings(request.settings); Map> mappingsForValidation = new HashMap<>(); @@ -248,20 +247,22 @@ public class MetaDataIndexTemplateService extends AbstractComponent { if (!request.name.toLowerCase(Locale.ROOT).equals(request.name)) { validationErrors.add("name must be lower cased"); } - if (request.template.contains(" ")) { - validationErrors.add("template must not contain a space"); - } - if (request.template.contains(",")) { - validationErrors.add("template 
must not contain a ','"); - } - if (request.template.contains("#")) { - validationErrors.add("template must not contain a '#'"); - } - if (request.template.startsWith("_")) { - validationErrors.add("template must not start with '_'"); - } - if (!Strings.validFileNameExcludingAstrix(request.template)) { - validationErrors.add("template must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); + for(String indexPattern : request.indexPatterns) { + if (indexPattern.contains(" ")) { + validationErrors.add("template must not contain a space"); + } + if (indexPattern.contains(",")) { + validationErrors.add("template must not contain a ','"); + } + if (indexPattern.contains("#")) { + validationErrors.add("template must not contain a '#'"); + } + if (indexPattern.startsWith("_")) { + validationErrors.add("template must not start with '_'"); + } + if (!Strings.validFileNameExcludingAstrix(indexPattern)) { + validationErrors.add("template must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); + } } try { @@ -283,8 +284,9 @@ public class MetaDataIndexTemplateService extends AbstractComponent { for (Alias alias : request.aliases) { //we validate the alias only partially, as we don't know yet to which index it'll get applied to aliasValidator.validateAliasStandalone(alias); - if (request.template.equals(alias.name())) { - throw new IllegalArgumentException("Alias [" + alias.name() + "] cannot be the same as the template pattern [" + request.template + "]"); + if (request.indexPatterns.contains(alias.name())) { + throw new IllegalArgumentException("Alias [" + alias.name() + + "] cannot be the same as any pattern in [" + String.join(", ", request.indexPatterns) + "]"); } } } @@ -302,7 +304,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { boolean create; int order; Integer version; - String template; + List indexPatterns; Settings settings = Settings.Builder.EMPTY_SETTINGS; Map mappings = new HashMap<>(); List 
aliases = new ArrayList<>(); @@ -320,8 +322,8 @@ public class MetaDataIndexTemplateService extends AbstractComponent { return this; } - public PutRequest template(String template) { - this.template = template; + public PutRequest patterns(List indexPatterns) { + this.indexPatterns = indexPatterns; return this; } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 1a4aa51c879..ebf9942e9a3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -20,9 +20,9 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateTaskListener; @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; @@ -51,10 +50,8 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; /** * Service responsible for submitting mapping changes */ @@ -215,27 +212,24 @@ public class MetaDataMappingService extends AbstractComponent { @Override public 
BatchResult execute(ClusterState currentState, List tasks) throws Exception { - Set indicesToClose = new HashSet<>(); + Map indexMapperServices = new HashMap<>(); BatchResult.Builder builder = BatchResult.builder(); try { - // precreate incoming indices; for (PutMappingClusterStateUpdateRequest request : tasks) { try { for (Index index : request.indices()) { final IndexMetaData indexMetaData = currentState.metaData().getIndexSafe(index); - if (indicesService.hasIndex(indexMetaData.getIndex()) == false) { - // if the index does not exists we create it once, add all types to the mapper service and - // close it later once we are done with mapping update - indicesToClose.add(indexMetaData.getIndex()); - IndexService indexService = indicesService.createIndex(indexMetaData, Collections.emptyList()); + if (indexMapperServices.containsKey(indexMetaData.getIndex()) == false) { + MapperService mapperService = indicesService.createIndexMapperService(indexMetaData); + indexMapperServices.put(index, mapperService); // add mappings for all types, we need them for cross-type validation for (ObjectCursor mapping : indexMetaData.getMappings().values()) { - indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), + mapperService.merge(mapping.value.type(), mapping.value.source(), MapperService.MergeReason.MAPPING_RECOVERY, request.updateAllTypes()); } } } - currentState = applyRequest(currentState, request); + currentState = applyRequest(currentState, request, indexMapperServices); builder.success(request); } catch (Exception e) { builder.failure(request, e); @@ -243,34 +237,33 @@ public class MetaDataMappingService extends AbstractComponent { } return builder.build(currentState); } finally { - for (Index index : indicesToClose) { - indicesService.removeIndex(index, "created for mapping processing"); - } + IOUtils.close(indexMapperServices.values()); } } - private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest 
request) throws IOException { + private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest request, + Map indexMapperServices) throws IOException { String mappingType = request.type(); CompressedXContent mappingUpdateSource = new CompressedXContent(request.source()); final MetaData metaData = currentState.metaData(); - final List> updateList = new ArrayList<>(); + final List updateList = new ArrayList<>(); for (Index index : request.indices()) { - IndexService indexService = indicesService.indexServiceSafe(index); + MapperService mapperService = indexMapperServices.get(index); // IMPORTANT: always get the metadata from the state since it gets batched // and if we pull it from the indexService we might miss an update etc. final IndexMetaData indexMetaData = currentState.getMetaData().getIndexSafe(index); - // this is paranoia... just to be sure we use the exact same indexService and metadata tuple on the update that + // this is paranoia... just to be sure we use the exact same metadata tuple on the update that // we used for the validation, it makes this mechanism a little less scary (a little) - updateList.add(new Tuple<>(indexService, indexMetaData)); + updateList.add(indexMetaData); // try and parse it (no need to add it here) so we can bail early in case of parsing exception DocumentMapper newMapper; - DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type()); + DocumentMapper existingMapper = mapperService.documentMapper(request.type()); if (MapperService.DEFAULT_MAPPING.equals(request.type())) { // _default_ types do not go through merging, but we do test the new settings.
Also don't apply the old default - newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, false); + newMapper = mapperService.parse(request.type(), mappingUpdateSource, false); } else { - newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, existingMapper == null); + newMapper = mapperService.parse(request.type(), mappingUpdateSource, existingMapper == null); if (existingMapper != null) { // first, simulate: just call merge and ignore the result existingMapper.merge(newMapper.mapping(), request.updateAllTypes()); @@ -286,9 +279,9 @@ public class MetaDataMappingService extends AbstractComponent { for (ObjectCursor mapping : indexMetaData.getMappings().values()) { String parentType = newMapper.parentFieldMapper().type(); if (parentType.equals(mapping.value.type()) && - indexService.mapperService().getParentTypes().contains(parentType) == false) { + mapperService.getParentTypes().contains(parentType) == false) { throw new IllegalArgumentException("can't add a _parent field that points to an " + - "already existing type, that isn't already a parent"); + "already existing type, that isn't already a parent"); } } } @@ -306,24 +299,25 @@ public class MetaDataMappingService extends AbstractComponent { throw new InvalidTypeNameException("Document mapping type name can't start with '_', found: [" + mappingType + "]"); } MetaData.Builder builder = MetaData.builder(metaData); - for (Tuple toUpdate : updateList) { + boolean updated = false; + for (IndexMetaData indexMetaData : updateList) { // do the actual merge here on the master, and update the mapping source // we use the exact same indexService and metadata we used to validate above here to actually apply the update - final IndexService indexService = toUpdate.v1(); - final IndexMetaData indexMetaData = toUpdate.v2(); final Index index = indexMetaData.getIndex(); + final MapperService mapperService = indexMapperServices.get(index); CompressedXContent 
existingSource = null; - DocumentMapper existingMapper = indexService.mapperService().documentMapper(mappingType); + DocumentMapper existingMapper = mapperService.documentMapper(mappingType); if (existingMapper != null) { existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = indexService.mapperService().merge(mappingType, mappingUpdateSource, MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); + DocumentMapper mergedMapper = mapperService.merge(mappingType, mappingUpdateSource, MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { if (existingSource.equals(updatedSource)) { // same source, no changes, ignore it } else { + updated = true; // use the merged mapping source if (logger.isDebugEnabled()) { logger.debug("{} update_mapping [{}] with source [{}]", index, mergedMapper.type(), updatedSource); @@ -333,6 +327,7 @@ public class MetaDataMappingService extends AbstractComponent { } } else { + updated = true; if (logger.isDebugEnabled()) { logger.debug("{} create_mapping [{}] with source [{}]", index, mappingType, updatedSource); } else if (logger.isInfoEnabled()) { @@ -343,13 +338,16 @@ public class MetaDataMappingService extends AbstractComponent { IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(indexMetaData); // Mapping updates on a single type may have side-effects on other types so we need to // update mapping metadata on all types - for (DocumentMapper mapper : indexService.mapperService().docMappers(true)) { + for (DocumentMapper mapper : mapperService.docMappers(true)) { indexMetaDataBuilder.putMapping(new MappingMetaData(mapper.mappingSource())); } builder.put(indexMetaDataBuilder); } - - return ClusterState.builder(currentState).metaData(builder).build(); + if (updated) { + return ClusterState.builder(currentState).metaData(builder).build(); + } else { + return currentState; + } } 
@Override diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 619959923e9..a81bfdd1732 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -572,14 +573,6 @@ public class IndexShardRoutingTable implements Iterable { } public Builder addShard(ShardRouting shardEntry) { - for (ShardRouting shard : shards) { - // don't add two that map to the same node id - // we rely on the fact that a node does not have primary and backup of the same shard - if (shard.assignedToNode() && shardEntry.assignedToNode() - && shard.currentNodeId().equals(shardEntry.currentNodeId())) { - return this; - } - } shards.add(shardEntry); return this; } @@ -590,9 +583,28 @@ public class IndexShardRoutingTable implements Iterable { } public IndexShardRoutingTable build() { + // don't allow more than one shard copy with same id to be allocated to same node + assert distinctNodes(shards) : "more than one shard with same id assigned to same node (shards: " + shards + ")"; return new IndexShardRoutingTable(shardId, Collections.unmodifiableList(new ArrayList<>(shards))); } + static boolean distinctNodes(List shards) { + Set nodes = new HashSet<>(); + for (ShardRouting shard : shards) { + if (shard.assignedToNode()) { + if (nodes.add(shard.currentNodeId()) == false) { + return false; + } + if (shard.relocating()) { + if (nodes.add(shard.relocatingNodeId()) == false) { + return false; + } + } + } + } + return true; + } + public static IndexShardRoutingTable readFrom(StreamInput in) throws IOException { Index index = 
new Index(in); return readFromThin(in, index); diff --git a/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index 4f637e05648..ce5c0f3e258 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -81,7 +81,9 @@ import java.util.concurrent.Future; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; +import java.util.function.UnaryOperator; import java.util.stream.Collectors; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -122,7 +124,7 @@ public class ClusterService extends AbstractLifecycleComponent { private final Queue onGoingTimeouts = ConcurrentCollections.newQueue(); - private volatile ClusterState clusterState; + private final AtomicReference state; private final ClusterBlocks.Builder initialBlocks; @@ -136,7 +138,7 @@ public class ClusterService extends AbstractLifecycleComponent { this.clusterSettings = clusterSettings; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); // will be replaced on doStart. 
- this.clusterState = ClusterState.builder(clusterName).build(); + this.state = new AtomicReference<>(new ClusterServiceState(ClusterState.builder(clusterName).build(), ClusterStateStatus.UNKNOWN)); this.clusterSettings.addSettingsUpdateConsumer(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, this::setSlowTaskLoggingThreshold); @@ -157,9 +159,43 @@ public class ClusterService extends AbstractLifecycleComponent { } public synchronized void setLocalNode(DiscoveryNode localNode) { - assert clusterState.nodes().getLocalNodeId() == null : "local node is already set"; - DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder(clusterState.nodes()).add(localNode).localNodeId(localNode.getId()); - this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).build(); + assert clusterServiceState().getClusterState().nodes().getLocalNodeId() == null : "local node is already set"; + updateState(css -> { + ClusterState clusterState = css.getClusterState(); + DiscoveryNodes nodes = DiscoveryNodes.builder(clusterState.nodes()).add(localNode).localNodeId(localNode.getId()).build(); + return new ClusterServiceState(ClusterState.builder(clusterState).nodes(nodes).build(), css.getClusterStateStatus()); + }); + } + + private void updateState(UnaryOperator updateFunction) { + this.state.getAndUpdate(oldClusterServiceState -> { + ClusterServiceState newClusterServiceState = updateFunction.apply(oldClusterServiceState); + assert validStateTransition(oldClusterServiceState, newClusterServiceState) : + "Invalid cluster service state transition from " + oldClusterServiceState + " to " + newClusterServiceState; + return newClusterServiceState; + }); + } + + private static boolean validStateTransition(ClusterServiceState oldClusterServiceState, ClusterServiceState newClusterServiceState) { + if (oldClusterServiceState == null || newClusterServiceState == null) { + return false; + } + ClusterStateStatus oldStatus = oldClusterServiceState.getClusterStateStatus(); + 
ClusterStateStatus newStatus = newClusterServiceState.getClusterStateStatus(); + // only go from UNKNOWN to UNKNOWN or BEING_APPLIED + if (oldStatus == ClusterStateStatus.UNKNOWN && newStatus == ClusterStateStatus.APPLIED) { + return false; + } + // only go from BEING_APPLIED to APPLIED + if (oldStatus == ClusterStateStatus.BEING_APPLIED && newStatus != ClusterStateStatus.APPLIED) { + return false; + } + // only go from APPLIED to BEING_APPLIED + if (oldStatus == ClusterStateStatus.APPLIED && newStatus != ClusterStateStatus.BEING_APPLIED) { + return false; + } + boolean identicalClusterState = oldClusterServiceState.getClusterState() == newClusterServiceState.getClusterState(); + return identicalClusterState == (oldStatus == ClusterStateStatus.BEING_APPLIED && newStatus == ClusterStateStatus.APPLIED); } public synchronized void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) { @@ -197,13 +233,14 @@ public class ClusterService extends AbstractLifecycleComponent { @Override protected synchronized void doStart() { Objects.requireNonNull(clusterStatePublisher, "please set a cluster state publisher before starting"); - Objects.requireNonNull(clusterState.nodes().getLocalNode(), "please set the local node before starting"); + Objects.requireNonNull(clusterServiceState().getClusterState().nodes().getLocalNode(), "please set the local node before starting"); Objects.requireNonNull(nodeConnectionsService, "please set the node connection service before starting"); add(localNodeMasterListeners); - this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build(); + updateState(css -> new ClusterServiceState( + ClusterState.builder(css.getClusterState()).blocks(initialBlocks).build(), + css.getClusterStateStatus())); this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(UPDATE_THREAD_NAME, daemonThreadFactory(settings, UPDATE_THREAD_NAME), threadPool.getThreadContext()); - this.clusterState = 
ClusterState.builder(clusterState).blocks(initialBlocks).build(); } @Override @@ -235,7 +272,7 @@ public class ClusterService extends AbstractLifecycleComponent { * The local node. */ public DiscoveryNode localNode() { - DiscoveryNode localNode = clusterState.getNodes().getLocalNode(); + DiscoveryNode localNode = state().getNodes().getLocalNode(); if (localNode == null) { throw new IllegalStateException("No local node found. Is the node started?"); } @@ -247,10 +284,17 @@ public class ClusterService extends AbstractLifecycleComponent { } /** - * The current state. + * The current cluster state. */ public ClusterState state() { - return this.clusterState; + return clusterServiceState().getClusterState(); + } + + /** + * The current cluster service state comprising cluster state and cluster state status. + */ + public ClusterServiceState clusterServiceState() { + return this.state.get(); } /** @@ -308,7 +352,7 @@ public class ClusterService extends AbstractLifecycleComponent { /** * Adds a cluster state listener that will timeout after the provided timeout, * and is executed after the clusterstate has been successfully applied ie. is - * in state {@link org.elasticsearch.cluster.ClusterState.ClusterStateStatus#APPLIED} + * in state {@link ClusterStateStatus#APPLIED} * NOTE: a {@code null} timeout means that the listener will never be removed * automatically */ @@ -495,6 +539,13 @@ public class ClusterService extends AbstractLifecycleComponent { return true; } + /** asserts that the current thread is NOT the cluster state update thread */ + public static boolean assertNotClusterStateUpdateThread(String reason) { + assert Thread.currentThread().getName().contains(UPDATE_THREAD_NAME) == false : + "Expected current thread [" + Thread.currentThread() + "] to not be the cluster state update thread. 
Reason: [" + reason + "]"; + return true; + } + public ClusterName getClusterName() { return clusterName; } @@ -542,7 +593,7 @@ public class ClusterService extends AbstractLifecycleComponent { return; } logger.debug("processing [{}]: execute", tasksSummary); - ClusterState previousClusterState = clusterState; + ClusterState previousClusterState = clusterServiceState().getClusterState(); if (!previousClusterState.nodes().isLocalNodeElectedMaster() && executor.runOnlyOnMaster()) { logger.debug("failing [{}]: local node is no longer master", tasksSummary); toExecute.stream().forEach(task -> task.listener.onNoLongerMaster(task.source)); @@ -653,8 +704,6 @@ public class ClusterService extends AbstractLifecycleComponent { } final Discovery.AckListener ackListener = new DelegetingAckListener(ackListeners); - newClusterState.status(ClusterState.ClusterStateStatus.BEING_APPLIED); - if (logger.isTraceEnabled()) { logger.trace("cluster state updated, source [{}]\n{}", tasksSummary, newClusterState); } else if (logger.isDebugEnabled()) { @@ -694,7 +743,8 @@ public class ClusterService extends AbstractLifecycleComponent { } // update the current cluster state - clusterState = newClusterState; + ClusterState finalNewClusterState = newClusterState; + updateState(css -> new ClusterServiceState(finalNewClusterState, ClusterStateStatus.BEING_APPLIED)); logger.debug("set local cluster state to version {}", newClusterState.version()); try { // nothing to do until we actually recover from the gateway or any other block indicates we need to disable persistency @@ -715,7 +765,7 @@ public class ClusterService extends AbstractLifecycleComponent { nodeConnectionsService.disconnectFromNodes(clusterChangedEvent.nodesDelta().removedNodes()); - newClusterState.status(ClusterState.ClusterStateStatus.APPLIED); + updateState(css -> new ClusterServiceState(css.getClusterState(), ClusterStateStatus.APPLIED)); for (ClusterStateListener listener : postAppliedListeners) { try { diff --git 
a/core/src/main/java/org/elasticsearch/cluster/service/ClusterServiceState.java b/core/src/main/java/org/elasticsearch/cluster/service/ClusterServiceState.java new file mode 100644 index 00000000000..3002941b482 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/service/ClusterServiceState.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.service; + +import org.elasticsearch.cluster.ClusterState; + +/** + * A simple immutable container class that comprises a cluster state and cluster state status. Used by {@link ClusterService} + * to provide a snapshot view on which cluster state is currently being applied / already applied. 
+ */ +public class ClusterServiceState { + private final ClusterState clusterState; + private final ClusterStateStatus clusterStateStatus; + + public ClusterServiceState(ClusterState clusterState, ClusterStateStatus clusterStateStatus) { + this.clusterState = clusterState; + this.clusterStateStatus = clusterStateStatus; + } + + public ClusterState getClusterState() { + return clusterState; + } + + public ClusterStateStatus getClusterStateStatus() { + return clusterStateStatus; + } + + @Override + public String toString() { + return "version [" + clusterState.version() + "], status [" + clusterStateStatus + "]"; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/IndexShardAlreadyExistsException.java b/core/src/main/java/org/elasticsearch/cluster/service/ClusterStateStatus.java similarity index 63% rename from core/src/main/java/org/elasticsearch/index/IndexShardAlreadyExistsException.java rename to core/src/main/java/org/elasticsearch/cluster/service/ClusterStateStatus.java index 7cdd869821e..419b307be68 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexShardAlreadyExistsException.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/ClusterStateStatus.java @@ -17,20 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.index; +package org.elasticsearch.cluster.service; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; - -public class IndexShardAlreadyExistsException extends ElasticsearchException { - - public IndexShardAlreadyExistsException(String message) { - super(message); - } - - public IndexShardAlreadyExistsException(StreamInput in) throws IOException { - super(in); - } -} \ No newline at end of file +public enum ClusterStateStatus { + UNKNOWN, + BEING_APPLIED, + APPLIED; +} diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index 1427f34a642..a04c75941e7 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -105,8 +105,11 @@ public interface BlobContainer { Map listBlobsByPrefix(String blobNamePrefix) throws IOException; /** - * Atomically renames the source blob into the target blob. If the source blob does not exist or the - * target blob already exists, an exception is thrown. + * Renames the source blob into the target blob. If the source blob does not exist or the + * target blob already exists, an exception is thrown. Atomicity of the move operation + * can only be guaranteed on an implementation-by-implementation basis. The only current + * implementation of {@link BlobContainer} for which atomicity can be guaranteed is the + * {@link org.elasticsearch.common.blobstore.fs.FsBlobContainer}. * * @param sourceBlobName * The blob to rename. 
diff --git a/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java b/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java index 9c55f2d2f18..b6ab1892f38 100644 --- a/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java +++ b/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.transport; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -68,6 +69,12 @@ public final class TransportAddress implements Writeable { * Read from a stream. */ public TransportAddress(StreamInput in) throws IOException { + if (in.getVersion().before(Version.V_6_0_0_alpha1)) { // bwc layer for 5.x where we had more than one transport address + final short i = in.readShort(); + if(i != 1) { // we fail hard to ensure nobody tries to use some custom transport address impl even if that is difficult to add + throw new AssertionError("illegal transport ID from node of version: " + in.getVersion() + " got: " + i + " expected: 1"); + } + } final int len = in.readByte(); final byte[] a = new byte[len]; // 4 bytes (IPv4) or 16 bytes (IPv6) in.readFully(a); @@ -78,6 +85,9 @@ public final class TransportAddress implements Writeable { @Override public void writeTo(StreamOutput out) throws IOException { + if (out.getVersion().before(Version.V_6_0_0_alpha1)) { + out.writeShort((short)1); // this maps to InetSocketTransportAddress in 5.x + } byte[] bytes = address.getAddress().getAddress(); // 4 bytes (IPv4) or 16 bytes (IPv6) out.writeByte((byte) bytes.length); // 1 byte out.write(bytes, 0, bytes.length); diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java index c3e60ec5be3..ee9aea9ed70 100644 --- 
a/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.util.concurrent; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transports; @@ -60,7 +61,9 @@ public abstract class BaseFuture implements Future { public V get(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException, ExecutionException { assert timeout <= 0 || - (Transports.assertNotTransportThread(BLOCKING_OP_REASON) && ThreadPool.assertNotScheduleThread(BLOCKING_OP_REASON)); + (Transports.assertNotTransportThread(BLOCKING_OP_REASON) && + ThreadPool.assertNotScheduleThread(BLOCKING_OP_REASON) && + ClusterService.assertNotClusterStateUpdateThread(BLOCKING_OP_REASON)); return sync.get(unit.toNanos(timeout)); } @@ -82,7 +85,9 @@ public abstract class BaseFuture implements Future { */ @Override public V get() throws InterruptedException, ExecutionException { - assert Transports.assertNotTransportThread(BLOCKING_OP_REASON) && ThreadPool.assertNotScheduleThread(BLOCKING_OP_REASON); + assert Transports.assertNotTransportThread(BLOCKING_OP_REASON) && + ThreadPool.assertNotScheduleThread(BLOCKING_OP_REASON) && + ClusterService.assertNotClusterStateUpdateThread(BLOCKING_OP_REASON); return sync.get(); } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java index 2f664679bb4..9662292cf69 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.util.concurrent; - import java.util.concurrent.BlockingQueue; 
import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; @@ -109,6 +108,27 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { } } + @Override + protected void afterExecute(Runnable r, Throwable t) { + super.afterExecute(r, t); + assert assertDefaultContext(r); + } + + private boolean assertDefaultContext(Runnable r) { + try { + assert contextHolder.isDefaultContext() : "the thread context is not the default context and the thread [" + + Thread.currentThread().getName() + "] is being returned to the pool after executing [" + r + "]"; + } catch (IllegalStateException ex) { + // sometimes we execute on a closed context and isDefaultContext doesn't bypass the ensureOpen checks + // this must not trigger an exception here since we only assert if the default is restored and + // we don't really care if we are closed + if (contextHolder.isClosed() == false) { + throw ex; + } + } + return true; + } + /** * Returns a stream of all pending tasks. This is similar to {@link #getQueue()} but will expose the originally submitted * {@link Runnable} instances rather than potentially wrapped ones.
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java index f55c84e943a..813265f19c4 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java @@ -107,6 +107,7 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { @Override protected void afterExecute(Runnable r, Throwable t) { + super.afterExecute(r, t); current.remove(r); } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 8c04c24ec5b..ca1d364ffd2 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -246,6 +246,20 @@ public final class ThreadContext implements Closeable, Writeable { return command; } + /** + * Returns true if the current context is the default context. 
+ */ + boolean isDefaultContext() { + return threadLocal.get() == DEFAULT_CONTEXT; + } + + /** + * Returns true if the context is closed, otherwise false + */ + boolean isClosed() { + return threadLocal.closed.get(); + } + @FunctionalInterface public interface StoredContext extends AutoCloseable { @Override @@ -468,10 +482,12 @@ */ private class ContextPreservingAbstractRunnable extends AbstractRunnable { private final AbstractRunnable in; - private final ThreadContext.StoredContext ctx; + private final ThreadContext.StoredContext creatorsContext; + + private ThreadContext.StoredContext threadsOriginalContext = null; private ContextPreservingAbstractRunnable(AbstractRunnable in) { - ctx = newStoredContext(); + creatorsContext = newStoredContext(); this.in = in; } @@ -482,7 +498,13 @@ @Override public void onAfter() { - in.onAfter(); + try { + in.onAfter(); + } finally { + if (threadsOriginalContext != null) { + threadsOriginalContext.restore(); + } + } } @Override @@ -498,8 +520,9 @@ @Override protected void doRun() throws Exception { boolean whileRunning = false; - try (ThreadContext.StoredContext ignore = stashContext()){ - ctx.restore(); + threadsOriginalContext = stashContext(); + try { + creatorsContext.restore(); whileRunning = true; in.doRun(); whileRunning = false; diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java index b8a42cd1e13..1d30b79e295 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java @@ -297,7 +297,7 @@ public final class ConstructingObjectParser mapOrdered() throws IOException; + Map
mapStrings() throws IOException; + + Map mapStringsOrdered() throws IOException; + List list() throws IOException; List listOrderedMap() throws IOException; diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentType.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentType.java index 296f9d2aedd..ddd736e0d00 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentType.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentType.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.cbor.CborXContent; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.smile.SmileXContent; @@ -32,7 +33,7 @@ import java.util.Locale; /** * The content type of {@link org.elasticsearch.common.xcontent.XContent}. */ -public enum XContentType { +public enum XContentType implements Writeable { /** * A JSON based content type. 
@@ -168,7 +169,8 @@ public enum XContentType { throw new IllegalStateException("Unknown XContentType with index [" + index + "]"); } - public static void writeTo(XContentType contentType, StreamOutput out) throws IOException { - out.writeVInt(contentType.index); + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(index); } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java index c0025499443..d13dcbd9c93 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java @@ -215,6 +215,16 @@ public abstract class AbstractXContentParser implements XContentParser { return readOrderedMap(this); } + @Override + public Map mapStrings() throws IOException { + return readMapStrings(this); + } + + @Override + public Map mapStringsOrdered() throws IOException { + return readOrderedMapStrings(this); + } + @Override public List list() throws IOException { return readList(this); @@ -229,10 +239,18 @@ public abstract class AbstractXContentParser implements XContentParser { Map newMap(); } + interface MapStringsFactory { + Map newMap(); + } + static final MapFactory SIMPLE_MAP_FACTORY = HashMap::new; static final MapFactory ORDERED_MAP_FACTORY = LinkedHashMap::new; + static final MapStringsFactory SIMPLE_MAP_STRINGS_FACTORY = HashMap::new; + + static final MapStringsFactory ORDERED_MAP_STRINGS_FACTORY = LinkedHashMap::new; + static Map readMap(XContentParser parser) throws IOException { return readMap(parser, SIMPLE_MAP_FACTORY); } @@ -241,6 +259,14 @@ public abstract class AbstractXContentParser implements XContentParser { return readMap(parser, ORDERED_MAP_FACTORY); } + static Map readMapStrings(XContentParser parser) throws IOException { + return readMapStrings(parser, 
SIMPLE_MAP_STRINGS_FACTORY); + } + + static Map readOrderedMapStrings(XContentParser parser) throws IOException { + return readMapStrings(parser, ORDERED_MAP_STRINGS_FACTORY); + } + static List readList(XContentParser parser) throws IOException { return readList(parser, SIMPLE_MAP_FACTORY); } @@ -269,6 +295,26 @@ public abstract class AbstractXContentParser implements XContentParser { return map; } + static Map readMapStrings(XContentParser parser, MapStringsFactory mapStringsFactory) throws IOException { + Map map = mapStringsFactory.newMap(); + XContentParser.Token token = parser.currentToken(); + if (token == null) { + token = parser.nextToken(); + } + if (token == XContentParser.Token.START_OBJECT) { + token = parser.nextToken(); + } + for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) { + // Must point to field name + String fieldName = parser.currentName(); + // And then the value... + parser.nextToken(); + String value = parser.text(); + map.put(fieldName, value); + } + return map; + } + static List readList(XContentParser parser, MapFactory mapFactory) throws IOException { XContentParser.Token token = parser.currentToken(); if (token == null) { diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 61316a852bb..98ce54428c7 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -24,86 +24,78 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.function.Function; import java.util.function.Supplier; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.ClusterSettings; import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.discovery.zen.ZenPing; import org.elasticsearch.plugins.DiscoveryPlugin; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; /** * A module for loading classes for node discovery. */ -public class DiscoveryModule extends AbstractModule { +public class DiscoveryModule { public static final Setting DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", "zen", Function.identity(), Property.NodeScope); - public static final Setting DISCOVERY_HOSTS_PROVIDER_SETTING = - new Setting<>("discovery.zen.hosts_provider", DISCOVERY_TYPE_SETTING, Function.identity(), Property.NodeScope); + public static final Setting> DISCOVERY_HOSTS_PROVIDER_SETTING = + new Setting<>("discovery.zen.hosts_provider", (String)null, Optional::ofNullable, Property.NodeScope); - private final Settings settings; - private final UnicastHostsProvider hostsProvider; - private final Map> discoveryTypes = new HashMap<>(); + private final Discovery discovery; - public DiscoveryModule(Settings settings, TransportService transportService, NetworkService networkService, - List plugins) { - this.settings = settings; - addDiscoveryType("none", NoneDiscovery.class); - addDiscoveryType("zen", ZenDiscovery.class); + public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportService transportService, NetworkService networkService, + ClusterService clusterService, List plugins) { + final UnicastHostsProvider hostsProvider; - String discoveryType = DISCOVERY_TYPE_SETTING.get(settings); - if (discoveryType.equals("none") == false) { - Map> hostProviders = new HashMap<>(); - hostProviders.put("zen", () -> Collections::emptyList); - for (DiscoveryPlugin plugin : 
plugins) { - plugin.getZenHostsProviders(transportService, networkService).entrySet().forEach(entry -> { - if (hostProviders.put(entry.getKey(), entry.getValue()) != null) { - throw new IllegalArgumentException("Cannot specify zen hosts provider [" + entry.getKey() + "] twice"); - } - }); - } - String hostsProviderName = DISCOVERY_HOSTS_PROVIDER_SETTING.get(settings); - Supplier hostsProviderSupplier = hostProviders.get(hostsProviderName); + Map> hostProviders = new HashMap<>(); + for (DiscoveryPlugin plugin : plugins) { + plugin.getZenHostsProviders(transportService, networkService).entrySet().forEach(entry -> { + if (hostProviders.put(entry.getKey(), entry.getValue()) != null) { + throw new IllegalArgumentException("Cannot register zen hosts provider [" + entry.getKey() + "] twice"); + } + }); + } + Optional hostsProviderName = DISCOVERY_HOSTS_PROVIDER_SETTING.get(settings); + if (hostsProviderName.isPresent()) { + Supplier hostsProviderSupplier = hostProviders.get(hostsProviderName.get()); if (hostsProviderSupplier == null) { - throw new IllegalArgumentException("Unknown zen hosts provider [" + hostsProviderName + "]"); + throw new IllegalArgumentException("Unknown zen hosts provider [" + hostsProviderName.get() + "]"); } hostsProvider = Objects.requireNonNull(hostsProviderSupplier.get()); } else { - hostsProvider = null; + hostsProvider = Collections::emptyList; } - } - public UnicastHostsProvider getHostsProvider() { - return hostsProvider; - } - - /** - * Adds a custom Discovery type. 
- */ - public void addDiscoveryType(String type, Class clazz) { - if (discoveryTypes.containsKey(type)) { - throw new IllegalArgumentException("discovery type [" + type + "] is already registered"); + Map> discoveryTypes = new HashMap<>(); + discoveryTypes.put("zen", () -> new ZenDiscovery(settings, threadPool, transportService, clusterService, hostsProvider)); + discoveryTypes.put("none", () -> new NoneDiscovery(settings, clusterService, clusterService.getClusterSettings())); + for (DiscoveryPlugin plugin : plugins) { + plugin.getDiscoveryTypes(threadPool, transportService, clusterService, hostsProvider).entrySet().forEach(entry -> { + if (discoveryTypes.put(entry.getKey(), entry.getValue()) != null) { + throw new IllegalArgumentException("Cannot register discovery type [" + entry.getKey() + "] twice"); + } + }); } - discoveryTypes.put(type, clazz); - } - - @Override - protected void configure() { String discoveryType = DISCOVERY_TYPE_SETTING.get(settings); - Class discoveryClass = discoveryTypes.get(discoveryType); - if (discoveryClass == null) { - throw new IllegalArgumentException("Unknown Discovery type [" + discoveryType + "]"); + Supplier discoverySupplier = discoveryTypes.get(discoveryType); + if (discoverySupplier == null) { + throw new IllegalArgumentException("Unknown discovery type [" + discoveryType + "]"); } + discovery = Objects.requireNonNull(discoverySupplier.get()); + } - if (discoveryType.equals("none") == false) { - bind(UnicastHostsProvider.class).toInstance(hostsProvider); - } - bind(Discovery.class).to(discoveryClass).asEagerSingleton(); + public Discovery getDiscovery() { + return discovery; } } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java index f92c496c088..58ba7bb177e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java +++ 
b/core/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.IncompatibleClusterStateVersionException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterStateStatus; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.compress.Compressor; @@ -397,7 +398,6 @@ public class PublishClusterStateAction extends AbstractComponent { pendingStatesQueue.addPending(incomingState); lastSeenClusterState = incomingState; - lastSeenClusterState.status(ClusterState.ClusterStateStatus.RECEIVED); } channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 90e7d3e2144..f9a16243e00 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -107,7 +107,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover private AllocationService allocationService; private final ClusterName clusterName; private final DiscoverySettings discoverySettings; - private final ZenPing zenPing; + protected final ZenPing zenPing; // protected to allow tests access private final MasterFaultDetection masterFD; private final NodesFaultDetection nodesFD; private final PublishClusterStateAction publishClusterState; @@ -138,15 +138,14 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover private volatile NodeJoinController nodeJoinController; private volatile NodeRemovalClusterStateTaskExecutor nodeRemovalExecutor; - @Inject public ZenDiscovery(Settings settings, ThreadPool threadPool, 
TransportService transportService, - ClusterService clusterService, ClusterSettings clusterSettings, ZenPing zenPing) { + ClusterService clusterService, UnicastHostsProvider hostsProvider) { super(settings); this.clusterService = clusterService; this.clusterName = clusterService.getClusterName(); this.transportService = transportService; - this.discoverySettings = new DiscoverySettings(settings, clusterSettings); - this.zenPing = zenPing; + this.discoverySettings = new DiscoverySettings(settings, clusterService.getClusterSettings()); + this.zenPing = newZenPing(settings, threadPool, transportService, hostsProvider); this.electMaster = new ElectMasterService(settings); this.pingTimeout = PING_TIMEOUT_SETTING.get(settings); this.joinTimeout = JOIN_TIMEOUT_SETTING.get(settings); @@ -161,12 +160,15 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover logger.debug("using ping_timeout [{}], join.timeout [{}], master_election.ignore_non_master [{}]", this.pingTimeout, joinTimeout, masterElectionIgnoreNonMasters); - clusterSettings.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> { - final ClusterState clusterState = clusterService.state(); - int masterNodes = clusterState.nodes().getMasterNodes().size(); - if (value > masterNodes) { - throw new IllegalArgumentException("cannot set " + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " to more than the current master nodes count [" + masterNodes + "]"); - } + clusterService.getClusterSettings().addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, + this::handleMinimumMasterNodesChanged, (value) -> { + final ClusterState clusterState = clusterService.state(); + int masterNodes = clusterState.nodes().getMasterNodes().size(); + if (value > masterNodes) { + throw new IllegalArgumentException("cannot set " + + 
ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " to more than the current" + + " master nodes count [" + masterNodes + "]"); + } }); this.masterFD = new MasterFaultDetection(settings, threadPool, transportService, clusterService); @@ -189,6 +191,12 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover DISCOVERY_REJOIN_ACTION_NAME, RejoinClusterRequest::new, ThreadPool.Names.SAME, new RejoinClusterRequestHandler()); } + // protected to allow overriding in tests + protected ZenPing newZenPing(Settings settings, ThreadPool threadPool, TransportService transportService, + UnicastHostsProvider hostsProvider) { + return new UnicastZenPing(settings, threadPool, transportService, hostsProvider); + } + @Override public void setAllocationService(AllocationService allocationService) { this.allocationService = allocationService; diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index eb0acf4185e..e3964653971 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.cache.query.DisabledQueryCache; import org.elasticsearch.index.cache.query.IndexQueryCache; import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.IndexingOperationListener; @@ -359,6 +360,16 @@ public final class IndexModule { searchOperationListeners, indexOperationListeners); } + /** + * creates a new mapper service to do administrative work like mapping updates. This *should not* be used for document parsing. + * doing so will result in an exception. 
+ */ + public MapperService newIndexMapperService(MapperRegistry mapperRegistry) throws IOException { + return new MapperService(indexSettings, analysisRegistry.build(indexSettings), + new SimilarityService(indexSettings, similarities), mapperRegistry, + () -> { throw new UnsupportedOperationException("no index query shard context available"); }); + } + /** * Forces a certain query cache to use instead of the default one. If this is set * and query caching is not disabled with {@code index.queries.cache.enabled}, then diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 5fc0d1d27cd..40fa4e5bb36 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -93,7 +93,6 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; public class IndexService extends AbstractIndexComponent implements IndicesClusterStateService.AllocatedIndex { private final IndexEventListener eventListener; - private final IndexAnalyzers indexAnalyzers; private final IndexFieldDataService indexFieldData; private final BitsetFilterCache bitsetFilterCache; private final NodeEnvironment nodeEnv; @@ -142,12 +141,11 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust List indexingOperationListeners) throws IOException { super(indexSettings); this.indexSettings = indexSettings; - this.indexAnalyzers = registry.build(indexSettings); this.similarityService = similarityService; - this.mapperService = new MapperService(indexSettings, indexAnalyzers, similarityService, mapperRegistry, + this.mapperService = new MapperService(indexSettings, registry.build(indexSettings), similarityService, mapperRegistry, // we parse all percolator queries as they would be parsed on shard 0 () -> newQueryShardContext(0, null, () -> { - throw new IllegalArgumentException("Percolator queries are not 
allowed to use the curent timestamp"); + throw new IllegalArgumentException("Percolator queries are not allowed to use the current timestamp"); })); this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService, mapperService); this.shardStoreDeleter = shardStoreDeleter; @@ -225,7 +223,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust } public IndexAnalyzers getIndexAnalyzers() { - return this.indexAnalyzers; + return this.mapperService.getIndexAnalyzers(); } public MapperService mapperService() { @@ -249,7 +247,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust } } } finally { - IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, indexAnalyzers, refreshTask, fsyncTask); + IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, mapperService, refreshTask, fsyncTask); } } } @@ -330,7 +328,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust } if (shards.containsKey(shardId.id())) { - throw new IndexShardAlreadyExistsException(shardId + " already exists"); + throw new IllegalStateException(shardId + " already exists"); } logger.debug("creating shard_id {}", shardId); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 1915efcb214..cb2a1af9539 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -23,7 +23,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.StoredField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.LongPoint; -import org.apache.lucene.index.XPointValues; +import org.apache.lucene.index.PointValues; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.search.BoostQuery; @@ -300,13 +300,13 @@ public class DateFieldMapper extends FieldMapper { @Override public FieldStats.Date stats(IndexReader reader) throws IOException { String field = name(); - long size = XPointValues.size(reader, field); + long size = PointValues.size(reader, field); if (size == 0) { return null; } - int docCount = XPointValues.getDocCount(reader, field); - byte[] min = XPointValues.getMinPackedValue(reader, field); - byte[] max = XPointValues.getMaxPackedValue(reader, field); + int docCount = PointValues.getDocCount(reader, field); + byte[] min = PointValues.getMinPackedValue(reader, field); + byte[] max = PointValues.getMaxPackedValue(reader, field); return new FieldStats.Date(reader.maxDoc(),docCount, -1L, size, isSearchable(), isAggregatable(), dateTimeFormatter(), LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0)); @@ -321,13 +321,13 @@ public class DateFieldMapper extends FieldMapper { dateParser = this.dateMathParser; } - if (XPointValues.size(reader, name()) == 0) { + if (PointValues.size(reader, name()) == 0) { // no points, so nothing matches return Relation.DISJOINT; } - long minValue = LongPoint.decodeDimension(XPointValues.getMinPackedValue(reader, name()), 0); - long maxValue = LongPoint.decodeDimension(XPointValues.getMaxPackedValue(reader, name()), 0); + long minValue = LongPoint.decodeDimension(PointValues.getMinPackedValue(reader, name()), 0); + long maxValue = LongPoint.decodeDimension(PointValues.getMaxPackedValue(reader, name()), 0); long fromInclusive = Long.MIN_VALUE; if (from != null) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 90740b794a8..4be8de2056f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -25,7 +25,7 @@ import 
org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.XPointValues; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; @@ -212,13 +212,13 @@ public class IpFieldMapper extends FieldMapper { @Override public FieldStats.Ip stats(IndexReader reader) throws IOException { String field = name(); - long size = XPointValues.size(reader, field); + long size = PointValues.size(reader, field); if (size == 0) { return null; } - int docCount = XPointValues.getDocCount(reader, field); - byte[] min = XPointValues.getMinPackedValue(reader, field); - byte[] max = XPointValues.getMaxPackedValue(reader, field); + int docCount = PointValues.getDocCount(reader, field); + byte[] min = PointValues.getMinPackedValue(reader, field); + byte[] max = PointValues.getMaxPackedValue(reader, field); return new FieldStats.Ip(reader.maxDoc(), docCount, -1L, size, isSearchable(), isAggregatable(), InetAddressPoint.decode(min), InetAddressPoint.decode(max)); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 83a20e03ffe..384331c2d9e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -94,7 +94,6 @@ public abstract class Mapper implements ToXContent, Iterable { private final ParseFieldMatcher parseFieldMatcher; private final Supplier queryShardContextSupplier; - private QueryShardContext queryShardContext; public ParserContext(String type, IndexAnalyzers indexAnalyzers, Function similarityLookupService, MapperService mapperService, Function typeParsers, @@ -138,12 +137,8 @@ public abstract class Mapper implements ToXContent, Iterable 
{ return parseFieldMatcher; } - public QueryShardContext queryShardContext() { - // No need for synchronization, this class must be used in a single thread - if (queryShardContext == null) { - queryShardContext = queryShardContextSupplier.get(); - } - return queryShardContext; + public Supplier queryShardContextSupplier() { + return queryShardContextSupplier; } public boolean isWithinMultiField() { return false; } @@ -161,7 +156,7 @@ public abstract class Mapper implements ToXContent, Iterable { static class MultiFieldParserContext extends ParserContext { MultiFieldParserContext(ParserContext in) { - super(in.type(), in.indexAnalyzers, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher(), in::queryShardContext); + super(in.type(), in.indexAnalyzers, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher(), in.queryShardContextSupplier()); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index f2a958f6fcf..d848ce15331 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -44,6 +44,7 @@ import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.indices.mapper.MapperRegistry; +import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -62,7 +63,7 @@ import static java.util.Collections.emptySet; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; -public class MapperService extends AbstractIndexComponent { +public class MapperService extends AbstractIndexComponent implements Closeable { /** * The reason why a mapping is being 
merged. @@ -624,6 +625,11 @@ public class MapperService extends AbstractIndexComponent { return parentTypes; } + @Override + public void close() throws IOException { + indexAnalyzers.close(); + } + /** * @return Whether a field is a metadata field. */ diff --git a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index b9bc3a2860a..afdb6c83d50 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -29,7 +29,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.XPointValues; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -241,13 +241,13 @@ public class NumberFieldMapper extends FieldMapper { @Override FieldStats.Double stats(IndexReader reader, String fieldName, boolean isSearchable, boolean isAggregatable) throws IOException { - long size = XPointValues.size(reader, fieldName); + long size = PointValues.size(reader, fieldName); if (size == 0) { return null; } - int docCount = XPointValues.getDocCount(reader, fieldName); - byte[] min = XPointValues.getMinPackedValue(reader, fieldName); - byte[] max = XPointValues.getMaxPackedValue(reader, fieldName); + int docCount = PointValues.getDocCount(reader, fieldName); + byte[] min = PointValues.getMinPackedValue(reader, fieldName); + byte[] max = PointValues.getMaxPackedValue(reader, fieldName); return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size, isSearchable, isAggregatable, HalfFloatPoint.decodeDimension(min, 0), HalfFloatPoint.decodeDimension(max, 0)); @@ -325,13 +325,13 @@ public class 
NumberFieldMapper extends FieldMapper { @Override FieldStats.Double stats(IndexReader reader, String fieldName, boolean isSearchable, boolean isAggregatable) throws IOException { - long size = XPointValues.size(reader, fieldName); + long size = PointValues.size(reader, fieldName); if (size == 0) { return null; } - int docCount = XPointValues.getDocCount(reader, fieldName); - byte[] min = XPointValues.getMinPackedValue(reader, fieldName); - byte[] max = XPointValues.getMaxPackedValue(reader, fieldName); + int docCount = PointValues.getDocCount(reader, fieldName); + byte[] min = PointValues.getMinPackedValue(reader, fieldName); + byte[] max = PointValues.getMaxPackedValue(reader, fieldName); return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size, isSearchable, isAggregatable, FloatPoint.decodeDimension(min, 0), FloatPoint.decodeDimension(max, 0)); @@ -409,13 +409,13 @@ public class NumberFieldMapper extends FieldMapper { @Override FieldStats.Double stats(IndexReader reader, String fieldName, boolean isSearchable, boolean isAggregatable) throws IOException { - long size = XPointValues.size(reader, fieldName); + long size = PointValues.size(reader, fieldName); if (size == 0) { return null; } - int docCount = XPointValues.getDocCount(reader, fieldName); - byte[] min = XPointValues.getMinPackedValue(reader, fieldName); - byte[] max = XPointValues.getMaxPackedValue(reader, fieldName); + int docCount = PointValues.getDocCount(reader, fieldName); + byte[] min = PointValues.getMinPackedValue(reader, fieldName); + byte[] max = PointValues.getMaxPackedValue(reader, fieldName); return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size, isSearchable, isAggregatable, DoublePoint.decodeDimension(min, 0), DoublePoint.decodeDimension(max, 0)); @@ -627,13 +627,13 @@ public class NumberFieldMapper extends FieldMapper { @Override FieldStats.Long stats(IndexReader reader, String fieldName, boolean isSearchable, boolean isAggregatable) throws IOException { - long size = 
XPointValues.size(reader, fieldName); + long size = PointValues.size(reader, fieldName); if (size == 0) { return null; } - int docCount = XPointValues.getDocCount(reader, fieldName); - byte[] min = XPointValues.getMinPackedValue(reader, fieldName); - byte[] max = XPointValues.getMaxPackedValue(reader, fieldName); + int docCount = PointValues.getDocCount(reader, fieldName); + byte[] min = PointValues.getMinPackedValue(reader, fieldName); + byte[] max = PointValues.getMaxPackedValue(reader, fieldName); return new FieldStats.Long(reader.maxDoc(),docCount, -1L, size, isSearchable, isAggregatable, IntPoint.decodeDimension(min, 0), IntPoint.decodeDimension(max, 0)); @@ -723,13 +723,13 @@ public class NumberFieldMapper extends FieldMapper { @Override FieldStats.Long stats(IndexReader reader, String fieldName, boolean isSearchable, boolean isAggregatable) throws IOException { - long size = XPointValues.size(reader, fieldName); + long size = PointValues.size(reader, fieldName); if (size == 0) { return null; } - int docCount = XPointValues.getDocCount(reader, fieldName); - byte[] min = XPointValues.getMinPackedValue(reader, fieldName); - byte[] max = XPointValues.getMaxPackedValue(reader, fieldName); + int docCount = PointValues.getDocCount(reader, fieldName); + byte[] min = PointValues.getMinPackedValue(reader, fieldName); + byte[] max = PointValues.getMaxPackedValue(reader, fieldName); return new FieldStats.Long(reader.maxDoc(),docCount, -1L, size, isSearchable, isAggregatable, LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0)); diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index f2c83ce2a73..3a98ba36d20 100644 --- a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -576,7 +576,7 @@ public final class InnerHitBuilder extends ToXContentToBytes 
implements Writeabl if (scriptFields != null) { for (ScriptField field : scriptFields) { SearchScript searchScript = innerHitsContext.getQueryShardContext().getSearchScript(field.script(), - ScriptContext.Standard.SEARCH, Collections.emptyMap()); + ScriptContext.Standard.SEARCH); innerHitsContext.scriptFields().add(new org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField( field.fieldName(), searchScript, field.ignoreFailure())); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index b569c36ed8f..26c15230279 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -127,8 +127,7 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier { } public BytesReference getTemplateBytes(Script template) { - ExecutableScript executable = scriptService.executable(template, - ScriptContext.Standard.SEARCH, Collections.emptyMap()); + ExecutableScript executable = scriptService.executable(template, ScriptContext.Standard.SEARCH); return (BytesReference) executable.run(); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 7c9d958e2eb..df542345911 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -338,18 +338,17 @@ public class QueryShardContext extends QueryRewriteContext { * Compiles (or retrieves from cache) and binds the parameters to the * provided script */ - public final SearchScript getSearchScript(Script script, ScriptContext context, Map params) { + public final SearchScript getSearchScript(Script script, ScriptContext context) { failIfFrozen(); - return scriptService.search(lookup(), 
script, context, params); + return scriptService.search(lookup(), script, context); } /** * Returns a lazily created {@link SearchScript} that is compiled immediately but can be pulled later once all * parameters are available. */ - public final Function, SearchScript> getLazySearchScript(Script script, ScriptContext context, - Map params) { + public final Function, SearchScript> getLazySearchScript(Script script, ScriptContext context) { failIfFrozen(); - CompiledScript compile = scriptService.compile(script, context, params); + CompiledScript compile = scriptService.compile(script, context, script.getOptions()); return (p) -> scriptService.search(lookup(), compile, p); } @@ -357,19 +356,18 @@ public class QueryShardContext extends QueryRewriteContext { * Compiles (or retrieves from cache) and binds the parameters to the * provided script */ - public final ExecutableScript getExecutableScript(Script script, ScriptContext context, Map params) { + public final ExecutableScript getExecutableScript(Script script, ScriptContext context) { failIfFrozen(); - return scriptService.executable(script, context, params); + return scriptService.executable(script, context); } /** * Returns a lazily created {@link ExecutableScript} that is compiled immediately but can be pulled later once all * parameters are available. 
*/ - public final Function, ExecutableScript> getLazyExecutableScript(Script script, ScriptContext context, - Map params) { + public final Function, ExecutableScript> getLazyExecutableScript(Script script, ScriptContext context) { failIfFrozen(); - CompiledScript executable = scriptService.compile(script, context, params); + CompiledScript executable = scriptService.compile(script, context, script.getOptions()); return (p) -> scriptService.executable(executable, p); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index eb508b9d4a4..08318874df2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -119,7 +119,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder ALLOWED_QUERY_MAPPER_TYPES; + public static final Set ALLOWED_QUERY_MAPPER_TYPES; static { ALLOWED_QUERY_MAPPER_TYPES = new HashSet<>(); @@ -908,7 +908,11 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder allQueryableDefaultFields(QueryShardContext context) { + /** + * Given a shard context, return a map of all fields in the mappings that + * can be queried. The map will be field name to a float of 1.0f. 
+ */ + public static Map allQueryableDefaultFields(QueryShardContext context) { Collection allFields = context.simpleMatchToIndexNames("*"); Map fields = new HashMap<>(); for (String fieldName : allFields) { @@ -943,6 +947,10 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder resolvedFields = new TreeMap<>(); + if ((useAllFields != null && useAllFields) && (fieldsAndWeights.size() != 0 || this.defaultField != null)) { + throw addValidationError("cannot use [all_fields] parameter in conjunction with [default_field] or [fields]", null); + } + // If explicitly required to use all fields, use all fields, OR: // Automatically determine the fields (to replace the _all field) if all of the following are true: // - The _all field is disabled, diff --git a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index 01f29614e2e..444d79491cb 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -33,14 +33,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; -import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; -import java.util.Collections; import java.util.Objects; import java.util.Optional; @@ -83,7 +79,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder @Override protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException { builder.startObject(NAME); - builder.field(ScriptField.SCRIPT.getPreferredName(), script); + 
builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script); printBoostAndQueryName(builder); builder.endObject(); } @@ -104,7 +100,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder } else if (parseContext.isDeprecatedSetting(currentFieldName)) { // skip } else if (token == XContentParser.Token.START_OBJECT) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) { + if (parseContext.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) { script = Script.parse(parser, parseContext.getParseFieldMatcher(), parseContext.getDefaultScriptLanguage()); } else { throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]"); @@ -114,7 +110,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder queryName = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { boost = parser.floatValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) { + } else if (parseContext.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) { script = Script.parse(parser, parseContext.getParseFieldMatcher(), parseContext.getDefaultScriptLanguage()); } else { throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]"); @@ -133,7 +129,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder @Override protected Query doToQuery(QueryShardContext context) throws IOException { - return new ScriptQuery(script, context.getSearchScript(script, ScriptContext.Standard.SEARCH, Collections.emptyMap())); + return new ScriptQuery(script, context.getSearchScript(script, ScriptContext.Standard.SEARCH)); } static class ScriptQuery extends Query { diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java 
b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java index 67d0d445113..4a49405ec2f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java @@ -270,6 +270,12 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp public Settings() { } + public Settings(Settings other) { + this.lenient = other.lenient; + this.analyzeWildcard = other.analyzeWildcard; + this.quoteFieldSuffix = other.quoteFieldSuffix; + } + /** Specifies whether to use lenient parsing, defaults to false. */ public void lenient(boolean lenient) { this.lenient = lenient; diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index fd297075067..5bc04d13f8b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -106,6 +106,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder resolvedFieldsAndWeights = new TreeMap<>(); - // Use the default field if no fields specified - if (fieldsAndWeights.isEmpty()) { - resolvedFieldsAndWeights.put(resolveIndexName(context.defaultField(), context), AbstractQueryBuilder.DEFAULT_BOOST); + + if ((useAllFields != null && useAllFields) && (fieldsAndWeights.size() != 0)) { + throw addValidationError("cannot use [all_fields] parameter in conjunction with [fields]", null); + } + + // If explicitly required to use all fields, use all fields, OR: + // Automatically determine the fields (to replace the _all field) if all of the following are true: + // - The _all field is disabled, + // - and the default_field has not been changed in the settings + // - and no fields are specified in the request + Settings newSettings = new Settings(settings); + if ((this.useAllFields != 
null && this.useAllFields) || + (context.getMapperService().allEnabled() == false && + "_all".equals(context.defaultField()) && + this.fieldsAndWeights.isEmpty())) { + resolvedFieldsAndWeights = QueryStringQueryBuilder.allQueryableDefaultFields(context); + // Need to use lenient mode when using "all-mode" so exceptions aren't thrown due to mismatched types + newSettings.lenient(true); } else { - for (Map.Entry fieldEntry : fieldsAndWeights.entrySet()) { - if (Regex.isSimpleMatchPattern(fieldEntry.getKey())) { - for (String fieldName : context.getMapperService().simpleMatchToIndexNames(fieldEntry.getKey())) { - resolvedFieldsAndWeights.put(fieldName, fieldEntry.getValue()); + // Use the default field if no fields specified + if (fieldsAndWeights.isEmpty()) { + resolvedFieldsAndWeights.put(resolveIndexName(context.defaultField(), context), AbstractQueryBuilder.DEFAULT_BOOST); + } else { + for (Map.Entry fieldEntry : fieldsAndWeights.entrySet()) { + if (Regex.isSimpleMatchPattern(fieldEntry.getKey())) { + for (String fieldName : context.getMapperService().simpleMatchToIndexNames(fieldEntry.getKey())) { + resolvedFieldsAndWeights.put(fieldName, fieldEntry.getValue()); + } + } else { + resolvedFieldsAndWeights.put(resolveIndexName(fieldEntry.getKey(), context), fieldEntry.getValue()); } - } else { - resolvedFieldsAndWeights.put(resolveIndexName(fieldEntry.getKey(), context), fieldEntry.getValue()); } } } @@ -369,7 +403,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(index.getName(), indexExpression, clusterService.state()); + final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting); + final IndexModule indexModule = new IndexModule(idxSettings, indexStoreConfig, analysisRegistry); + pluginsService.onIndexModule(indexModule); + return indexModule.newIndexMapperService(mapperRegistry); + } + /** * This 
method verifies that the given {@code metaData} holds sane values to create an {@link IndexService}. * This method tries to update the meta data of the created {@link IndexService} if the given {@code metaDataUpdate} is different from the given {@code metaData}. @@ -703,8 +717,9 @@ public class IndicesService extends AbstractLifecycleComponent final IndexMetaData metaData = clusterState.getMetaData().indices().get(shardId.getIndexName()); final IndexSettings indexSettings = buildIndexSettings(metaData); - if (canDeleteShardContent(shardId, indexSettings) == false) { - throw new IllegalStateException("Can't delete shard " + shardId); + ShardDeletionCheckResult shardDeletionCheckResult = canDeleteShardContent(shardId, indexSettings); + if (shardDeletionCheckResult != ShardDeletionCheckResult.FOLDER_FOUND_CAN_DELETE) { + throw new IllegalStateException("Can't delete shard " + shardId + " (cause: " + shardDeletionCheckResult + ")"); } nodeEnv.deleteShardDirectorySafe(shardId, indexSettings); logger.debug("{} deleted shard reason [{}]", shardId, reason); @@ -785,39 +800,50 @@ public class IndicesService extends AbstractLifecycleComponent } /** - * Returns true iff the shards content for the given shard can be deleted. - * This method will return false if: - *
    - *
  • if the shard is still allocated / active on this node
  • - *
  • if for instance if the shard is located on shared and should not be deleted
  • - *
  • if the shards data locations do not exists
  • - *
+ * result type returned by {@link #canDeleteShardContent signaling different reasons why a shard can / cannot be deleted} + */ + public enum ShardDeletionCheckResult { + FOLDER_FOUND_CAN_DELETE, // shard data exists and can be deleted + STILL_ALLOCATED, // the shard is still allocated / active on this node + NO_FOLDER_FOUND, // the shards data locations do not exist + SHARED_FILE_SYSTEM, // the shard is located on shared and should not be deleted + NO_LOCAL_STORAGE // node does not have local storage (see DiscoveryNode.nodeRequiresLocalStorage) + } + + /** + * Returns ShardDeletionCheckResult signaling whether the shards content for the given shard can be deleted. * * @param shardId the shard to delete. * @param indexSettings the shards's relevant {@link IndexSettings}. This is required to access the indexes settings etc. */ - public boolean canDeleteShardContent(ShardId shardId, IndexSettings indexSettings) { + public ShardDeletionCheckResult canDeleteShardContent(ShardId shardId, IndexSettings indexSettings) { assert shardId.getIndex().equals(indexSettings.getIndex()); final IndexService indexService = indexService(shardId.getIndex()); if (indexSettings.isOnSharedFilesystem() == false) { if (nodeEnv.hasNodeFile()) { final boolean isAllocated = indexService != null && indexService.hasShard(shardId.id()); if (isAllocated) { - return false; // we are allocated - can't delete the shard + return ShardDeletionCheckResult.STILL_ALLOCATED; // we are allocated - can't delete the shard } else if (indexSettings.hasCustomDataPath()) { // lets see if it's on a custom path (return false if the shared doesn't exist) // we don't need to delete anything that is not there - return Files.exists(nodeEnv.resolveCustomLocation(indexSettings, shardId)); + return Files.exists(nodeEnv.resolveCustomLocation(indexSettings, shardId)) ? 
+ ShardDeletionCheckResult.FOLDER_FOUND_CAN_DELETE : + ShardDeletionCheckResult.NO_FOLDER_FOUND; } else { // lets see if it's path is available (return false if the shared doesn't exist) // we don't need to delete anything that is not there - return FileSystemUtils.exists(nodeEnv.availableShardPaths(shardId)); + return FileSystemUtils.exists(nodeEnv.availableShardPaths(shardId)) ? + ShardDeletionCheckResult.FOLDER_FOUND_CAN_DELETE : + ShardDeletionCheckResult.NO_FOLDER_FOUND; } - } + } else { + return ShardDeletionCheckResult.NO_LOCAL_STORAGE; + } } else { logger.trace("{} skipping shard directory deletion due to shadow replicas", shardId); + return ShardDeletionCheckResult.SHARED_FILE_SYSTEM; } - return false; } private IndexSettings buildIndexSettings(IndexMetaData metaData) { @@ -1126,7 +1152,7 @@ public class IndicesService extends AbstractLifecycleComponent public void loadIntoContext(ShardSearchRequest request, SearchContext context, QueryPhase queryPhase) throws Exception { assert canCache(request, context); final DirectoryReader directoryReader = context.searcher().getDirectoryReader(); - + boolean[] loadedFromCache = new boolean[] { true }; BytesReference bytesReference = cacheShardLevelResult(context.indexShard(), directoryReader, request.cacheKey(), out -> { queryPhase.execute(context); diff --git a/core/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java b/core/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java index bea40f96d65..8786b206477 100644 --- a/core/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java +++ b/core/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java @@ -33,6 +33,10 @@ public class InvalidAliasNameException extends ElasticsearchException { setIndex(index); } + public InvalidAliasNameException(String name, String description) { + super("Invalid alias name [{}]: {}", name, description); + } + public InvalidAliasNameException(StreamInput in) throws 
IOException{ super(in); } diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 1fb360ccfd5..682d3f72321 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -31,7 +31,6 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.Type; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingTable; @@ -40,7 +39,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.Callback; @@ -52,7 +50,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexComponent; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexShardAlreadyExistsException; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardRelocatedException; @@ -532,10 +529,6 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple RecoveryState recoveryState = new RecoveryState(shardRouting, nodes.getLocalNode(), sourceNode); 
indicesService.createShard(shardRouting, recoveryState, recoveryTargetService, new RecoveryListener(shardRouting), repositoriesService, failedShardHandler); - } catch (IndexShardAlreadyExistsException e) { - // ignore this, the method call can happen several times - logger.debug("Trying to create shard that already exists", e); - assert false; } catch (Exception e) { failAndRemoveShard(shardRouting, true, "failed to create shard", e); } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index f3262d4e133..3844f386362 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.ClusterServiceState; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; @@ -399,7 +400,7 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde private void waitForClusterState(long clusterStateVersion) { ClusterStateObserver observer = new ClusterStateObserver(clusterService, TimeValue.timeValueMinutes(5), logger, threadPool.getThreadContext()); - final ClusterState clusterState = observer.observedState(); + final ClusterState clusterState = observer.observedState().getClusterState(); if (clusterState.getVersion() >= clusterStateVersion) { logger.trace("node has cluster state with version higher than {} (current: {})", clusterStateVersion, clusterState.getVersion()); @@ -426,20 +427,20 @@ public class PeerRecoveryTargetService extends 
AbstractComponent implements Inde }, new ClusterStateObserver.ValidationPredicate() { @Override - protected boolean validate(ClusterState newState) { - return newState.getVersion() >= clusterStateVersion; + protected boolean validate(ClusterServiceState newState) { + return newState.getClusterState().getVersion() >= clusterStateVersion; } }); try { future.get(); logger.trace("successfully waited for cluster state with version {} (current: {})", clusterStateVersion, - observer.observedState().getVersion()); + observer.observedState().getClusterState().getVersion()); } catch (Exception e) { logger.debug( (Supplier) () -> new ParameterizedMessage( "failed waiting for cluster state with version {} (current: {})", clusterStateVersion, - observer.observedState()), + observer.observedState().getClusterState().getVersion()), e); throw ExceptionsHelper.convertToRuntime(e); } diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index d1520d15027..324903eb43a 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -31,8 +31,11 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.ClusterServiceState; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -62,7 +65,10 @@ import java.io.Closeable; import java.io.IOException; import 
java.util.ArrayList; import java.util.EnumSet; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -79,6 +85,9 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe private final TransportService transportService; private final ThreadPool threadPool; + // Cache successful shard deletion checks to prevent unnecessary file system lookups + private final Set folderNotFoundCache = new HashSet<>(); + private TimeValue deleteShardTimeout; @Inject @@ -114,11 +123,31 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe return; } - for (IndexRoutingTable indexRoutingTable : event.state().routingTable()) { + RoutingTable routingTable = event.state().routingTable(); + + // remove entries from cache that don't exist in the routing table anymore (either closed or deleted indices) + // - removing shard data of deleted indices is handled by IndicesClusterStateService + // - closed indices don't need to be removed from the cache but we do it anyway for code simplicity + for (Iterator it = folderNotFoundCache.iterator(); it.hasNext(); ) { + ShardId shardId = it.next(); + if (routingTable.hasIndex(shardId.getIndex()) == false) { + it.remove(); + } + } + // remove entries from cache which are allocated to this node + final String localNodeId = event.state().nodes().getLocalNodeId(); + RoutingNode localRoutingNode = event.state().getRoutingNodes().node(localNodeId); + if (localRoutingNode != null) { + for (ShardRouting routing : localRoutingNode) { + folderNotFoundCache.remove(routing.shardId()); + } + } + + for (IndexRoutingTable indexRoutingTable : routingTable) { // Note, closed indices will not have any routing information, so won't be deleted for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { - if (shardCanBeDeleted(event.state(), indexShardRoutingTable)) { - 
ShardId shardId = indexShardRoutingTable.shardId(); + ShardId shardId = indexShardRoutingTable.shardId(); + if (folderNotFoundCache.contains(shardId) == false && shardCanBeDeleted(localNodeId, indexShardRoutingTable)) { IndexService indexService = indicesService.indexService(indexRoutingTable.getIndex()); final IndexSettings indexSettings; if (indexService == null) { @@ -127,15 +156,33 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe } else { indexSettings = indexService.getIndexSettings(); } - if (indicesService.canDeleteShardContent(shardId, indexSettings)) { - deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable); + IndicesService.ShardDeletionCheckResult shardDeletionCheckResult = indicesService.canDeleteShardContent(shardId, indexSettings); + switch (shardDeletionCheckResult) { + case FOLDER_FOUND_CAN_DELETE: + deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable); + break; + case NO_FOLDER_FOUND: + folderNotFoundCache.add(shardId); + break; + case NO_LOCAL_STORAGE: + assert false : "shard deletion only runs on data nodes which always have local storage"; + // nothing to do + break; + case STILL_ALLOCATED: + // nothing to do + break; + case SHARED_FILE_SYSTEM: + // nothing to do + break; + default: + assert false : "unknown shard deletion check result: " + shardDeletionCheckResult; } } } } } - boolean shardCanBeDeleted(ClusterState state, IndexShardRoutingTable indexShardRoutingTable) { + static boolean shardCanBeDeleted(String localNodeId, IndexShardRoutingTable indexShardRoutingTable) { // a shard can be deleted if all its copies are active, and its not allocated on this node if (indexShardRoutingTable.size() == 0) { // should not really happen, there should always be at least 1 (primary) shard in a @@ -145,27 +192,12 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe for (ShardRouting shardRouting : indexShardRoutingTable) { // be conservative here, check on 
started, not even active - if (!shardRouting.started()) { + if (shardRouting.started() == false) { return false; } - // if the allocated or relocation node id doesn't exists in the cluster state it may be a stale node, - // make sure we don't do anything with this until the routing table has properly been rerouted to reflect - // the fact that the node does not exists - DiscoveryNode node = state.nodes().get(shardRouting.currentNodeId()); - if (node == null) { - return false; - } - if (shardRouting.relocatingNodeId() != null) { - node = state.nodes().get(shardRouting.relocatingNodeId()); - if (node == null) { - return false; - } - } - - // check if shard is active on the current node or is getting relocated to the our node - String localNodeId = state.getNodes().getLocalNode().getId(); - if (localNodeId.equals(shardRouting.currentNodeId()) || localNodeId.equals(shardRouting.relocatingNodeId())) { + // check if shard is active on the current node + if (localNodeId.equals(shardRouting.currentNodeId())) { return false; } } @@ -178,19 +210,13 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe String indexUUID = indexShardRoutingTable.shardId().getIndex().getUUID(); ClusterName clusterName = state.getClusterName(); for (ShardRouting shardRouting : indexShardRoutingTable) { - // Node can't be null, because otherwise shardCanBeDeleted() would have returned false + assert shardRouting.started() : "expected started shard but was " + shardRouting; DiscoveryNode currentNode = state.nodes().get(shardRouting.currentNodeId()); - assert currentNode != null; - requests.add(new Tuple<>(currentNode, new ShardActiveRequest(clusterName, indexUUID, shardRouting.shardId(), deleteShardTimeout))); - if (shardRouting.relocatingNodeId() != null) { - DiscoveryNode relocatingNode = state.nodes().get(shardRouting.relocatingNodeId()); - assert relocatingNode != null; - requests.add(new Tuple<>(relocatingNode, new ShardActiveRequest(clusterName, indexUUID, 
shardRouting.shardId(), deleteShardTimeout))); - } } - ShardActiveResponseHandler responseHandler = new ShardActiveResponseHandler(indexShardRoutingTable.shardId(), state, requests.size()); + ShardActiveResponseHandler responseHandler = new ShardActiveResponseHandler(indexShardRoutingTable.shardId(), state.getVersion(), + requests.size()); for (Tuple request : requests) { logger.trace("{} sending shard active check to {}", request.v2().shardId, request.v1()); transportService.sendRequest(request.v1(), ACTION_SHARD_EXISTS, request.v2(), responseHandler); @@ -201,14 +227,14 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe private final ShardId shardId; private final int expectedActiveCopies; - private final ClusterState clusterState; + private final long clusterStateVersion; private final AtomicInteger awaitingResponses; private final AtomicInteger activeCopies; - public ShardActiveResponseHandler(ShardId shardId, ClusterState clusterState, int expectedActiveCopies) { + public ShardActiveResponseHandler(ShardId shardId, long clusterStateVersion, int expectedActiveCopies) { this.shardId = shardId; this.expectedActiveCopies = expectedActiveCopies; - this.clusterState = clusterState; + this.clusterStateVersion = clusterStateVersion; this.awaitingResponses = new AtomicInteger(expectedActiveCopies); this.activeCopies = new AtomicInteger(); } @@ -250,8 +276,8 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe } ClusterState latestClusterState = clusterService.state(); - if (clusterState.getVersion() != latestClusterState.getVersion()) { - logger.trace("not deleting shard {}, the latest cluster state version[{}] is not equal to cluster state before shard active api call [{}]", shardId, latestClusterState.getVersion(), clusterState.getVersion()); + if (clusterStateVersion != latestClusterState.getVersion()) { + logger.trace("not deleting shard {}, the latest cluster state version[{}] is not equal to cluster 
state before shard active api call [{}]", shardId, latestClusterState.getVersion(), clusterStateVersion); return; } @@ -263,8 +289,8 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe @Override public ClusterState execute(ClusterState currentState) throws Exception { - if (clusterState.getVersion() != currentState.getVersion()) { - logger.trace("not deleting shard {}, the update task state version[{}] is not equal to cluster state before shard active api call [{}]", shardId, currentState.getVersion(), clusterState.getVersion()); + if (clusterStateVersion != currentState.getVersion()) { + logger.trace("not deleting shard {}, the update task state version[{}] is not equal to cluster state before shard active api call [{}]", shardId, currentState.getVersion(), clusterStateVersion); return currentState; } try { @@ -334,7 +360,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe } }, new ClusterStateObserver.ValidationPredicate() { @Override - protected boolean validate(ClusterState newState) { + protected boolean validate(ClusterServiceState newState) { // the shard is not there in which case we want to send back a false (shard is not active), so the cluster state listener must be notified // or the shard is active in which case we want to send back that the shard is active // here we could also evaluate the cluster state and get the information from there. 
we diff --git a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java index a6c45b71697..6bb410c78ea 100644 --- a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java +++ b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java @@ -43,7 +43,7 @@ public class InternalTemplateService implements TemplateService { int mustacheStart = template.indexOf("{{"); int mustacheEnd = template.indexOf("}}"); if (mustacheStart != -1 && mustacheEnd != -1 && mustacheStart < mustacheEnd) { - Script script = new Script(template, ScriptType.INLINE, "mustache", Collections.emptyMap()); + Script script = new Script(ScriptType.INLINE, "mustache", template, Collections.emptyMap()); CompiledScript compiledScript = scriptService.compile( script, ScriptContext.Standard.INGEST, diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index f5ad4ff8772..9eb7f9a0376 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -19,10 +19,7 @@ package org.elasticsearch.node; -import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.core.LoggerContext; -import org.apache.logging.log4j.core.config.Configurator; import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Build; @@ -35,9 +32,11 @@ import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.client.Client; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; +import 
org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.MasterNodeChangePredicate; import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; @@ -307,6 +306,7 @@ public class Node implements Closeable { for (final ExecutorBuilder builder : threadPool.builders()) { additionalSettings.addAll(builder.getRegisteredSettings()); } + client = new NodeClient(settings, threadPool); final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool); final ScriptModule scriptModule = ScriptModule.create(settings, this.environment, resourceWatcherService, pluginsService.filterPlugins(ScriptPlugin.class)); @@ -327,6 +327,7 @@ public class Node implements Closeable { resourcesToClose.add(tribeService); final IngestService ingestService = new IngestService(settings, threadPool, this.environment, scriptModule.getScriptService(), analysisModule.getAnalysisRegistry(), pluginsService.filterPlugins(IngestPlugin.class)); + final ClusterInfoService clusterInfoService = newClusterInfoService(settings, clusterService, threadPool, client); ModulesBuilder modules = new ModulesBuilder(); // plugin modules must be added here, before others or we can get crazy injection errors... 
@@ -362,7 +363,6 @@ public class Node implements Closeable { .flatMap(Function.identity()).collect(Collectors.toList()); final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); final MetaStateService metaStateService = new MetaStateService(settings, nodeEnvironment); - client = new NodeClient(settings, threadPool); final IndicesService indicesService = new IndicesService(settings, pluginsService, nodeEnvironment, settingsModule.getClusterSettings(), analysisModule.getAnalysisRegistry(), searchModule.getQueryParserRegistry(), clusterModule.getIndexNameExpressionResolver(), indicesModule.getMapperRegistry(), namedWriteableRegistry, @@ -397,10 +397,9 @@ public class Node implements Closeable { b.bind(HttpServer.class).toProvider(Providers.of(null)); }; } - final DiscoveryModule discoveryModule = new DiscoveryModule(this.settings, transportService, networkService, - pluginsService.filterPlugins(DiscoveryPlugin.class)); - final ZenPing zenPing = newZenPing(settings, threadPool, transportService, discoveryModule.getHostsProvider()); - modules.add(discoveryModule); + + final DiscoveryModule discoveryModule = new DiscoveryModule(this.settings, threadPool, transportService, + networkService, clusterService, pluginsService.filterPlugins(DiscoveryPlugin.class)); pluginsService.processModules(modules); modules.add(b -> { b.bind(IndicesQueriesRegistry.class).toInstance(searchModule.getQueryParserRegistry()); @@ -432,7 +431,8 @@ public class Node implements Closeable { b.bind(UpdateHelper.class).toInstance(new UpdateHelper(settings, scriptModule.getScriptService())); b.bind(MetaDataIndexUpgradeService.class).toInstance(new MetaDataIndexUpgradeService(settings, indicesModule.getMapperRegistry(), settingsModule.getIndexScopedSettings())); - b.bind(ZenPing.class).toInstance(zenPing); + b.bind(ClusterInfoService.class).toInstance(clusterInfoService); + b.bind(Discovery.class).toInstance(discoveryModule.getDiscovery()); { RecoverySettings 
recoverySettings = new RecoverySettings(settings, settingsModule.getClusterSettings()); processRecoverySettings(settingsModule.getClusterSettings(), recoverySettings); @@ -590,7 +590,7 @@ public class Node implements Closeable { if (DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings).millis() > 0) { final ThreadPool thread = injector.getInstance(ThreadPool.class); ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, thread.getThreadContext()); - if (observer.observedState().nodes().getMasterNodeId() == null) { + if (observer.observedState().getClusterState().nodes().getMasterNodeId() == null) { final CountDownLatch latch = new CountDownLatch(1); observer.waitForNextChange(new ClusterStateObserver.Listener() { @Override @@ -624,7 +624,6 @@ public class Node implements Closeable { // start nodes now, after the http server, because it may take some time tribeService.startNodes(); - if (WRITE_PORTS_FIELD_SETTING.get(settings)) { if (NetworkModule.HTTP_ENABLED.get(settings)) { HttpServerTransport http = injector.getInstance(HttpServerTransport.class); @@ -765,24 +764,6 @@ public class Node implements Closeable { } IOUtils.close(toClose); logger.info("closed"); - - final String log4jShutdownEnabled = System.getProperty("es.log4j.shutdownEnabled", "true"); - final boolean shutdownEnabled; - switch (log4jShutdownEnabled) { - case "true": - shutdownEnabled = true; - break; - case "false": - shutdownEnabled = false; - break; - default: - throw new IllegalArgumentException( - "invalid value for [es.log4j.shutdownEnabled], was [" + log4jShutdownEnabled + "] but must be [true] or [false]"); - } - if (shutdownEnabled) { - LoggerContext context = (LoggerContext) LogManager.getContext(false); - Configurator.shutdown(context); - } } @@ -890,14 +871,14 @@ public class Node implements Closeable { return customNameResolvers; } - /** Create a new ZenPing instance for use in zen discovery. 
*/ - protected ZenPing newZenPing(Settings settings, ThreadPool threadPool, TransportService transportService, - UnicastHostsProvider hostsProvider) { - return new UnicastZenPing(settings, threadPool, transportService, hostsProvider); - } - /** Constructs an internal node used as a client into a cluster fronted by this tribe node. */ protected Node newTribeClientNode(Settings settings, Collection> classpathPlugins) { return new Node(new Environment(settings), classpathPlugins); } + + /** Constructs a ClusterInfoService which may be mocked for tests. */ + protected ClusterInfoService newClusterInfoService(Settings settings, ClusterService clusterService, + ThreadPool threadPool, NodeClient client) { + return new InternalClusterInfoService(settings, clusterService, threadPool, client); + } } diff --git a/core/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/core/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index 3d769d27a87..2198297129e 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -49,7 +49,7 @@ public interface ActionPlugin { /** * Actions added by this plugin. */ - default List, ? 
extends ActionResponse>> getActions() { + default List> getActions() { return Collections.emptyList(); } /** @@ -72,7 +72,7 @@ public interface ActionPlugin { return Collections.emptyList(); } - final class ActionHandler, Response extends ActionResponse> { + final class ActionHandler { private final GenericAction action; private final Class> transportAction; private final Class[] supportTransportActions; diff --git a/core/src/main/java/org/elasticsearch/plugins/DiscoveryPlugin.java b/core/src/main/java/org/elasticsearch/plugins/DiscoveryPlugin.java index 6d3e7f90e2f..37b97855084 100644 --- a/core/src/main/java/org/elasticsearch/plugins/DiscoveryPlugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/DiscoveryPlugin.java @@ -23,9 +23,13 @@ import java.util.Collections; import java.util.Map; import java.util.function.Supplier; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.zen.UnicastHostsProvider; +import org.elasticsearch.discovery.zen.ZenPing; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; /** @@ -42,6 +46,24 @@ import org.elasticsearch.transport.TransportService; * } */ public interface DiscoveryPlugin { + + /** + * Returns custom discovery implementations added by this plugin. + * + * The key of the returned map is the name of the discovery implementation + * (see {@link org.elasticsearch.discovery.DiscoveryModule#DISCOVERY_TYPE_SETTING}, and + * the value is a supplier to construct the {@link Discovery}. 
+ * + * @param threadPool Use to schedule ping actions + * @param transportService Use to communicate with other nodes + * @param clusterService Use to find current nodes in the cluster + * @param hostsProvider Use to find configured hosts which should be pinged for initial discovery + */ + default Map> getDiscoveryTypes(ThreadPool threadPool, TransportService transportService, + ClusterService clusterService, UnicastHostsProvider hostsProvider) { + return Collections.emptyMap(); + } + /** * Override to add additional {@link NetworkService.CustomNameResolver}s. * This can be handy if you want to provide your own Network interface name like _mycard_ diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 1e39edc6341..7bb554df9a3 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -25,6 +25,7 @@ import java.util.List; import org.elasticsearch.action.ActionModule; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.LifecycleComponent; @@ -35,8 +36,10 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.index.IndexModule; import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; @@ -206,7 +209,7 @@ public abstract class Plugin { public final void onModule(ActionModule module) {} /** - * 
Old-style action extension point. {@code @Deprecated} and {@code final} to act as a signpost for plugin authors upgrading + * Old-style search extension point. {@code @Deprecated} and {@code final} to act as a signpost for plugin authors upgrading * from 2.x. * * @deprecated implement {@link SearchPlugin} instead @@ -215,11 +218,38 @@ public abstract class Plugin { public final void onModule(SearchModule module) {} /** - * Old-style action extension point. {@code @Deprecated} and {@code final} to act as a signpost for plugin authors upgrading + * Old-style network extension point. {@code @Deprecated} and {@code final} to act as a signpost for plugin authors upgrading * from 2.x. * * @deprecated implement {@link NetworkPlugin} instead */ @Deprecated public final void onModule(NetworkModule module) {} + + /** + * Old-style snapshot/restore extension point. {@code @Deprecated} and {@code final} to act as a signpost for plugin authors upgrading + * from 2.x. + * + * @deprecated implement {@link RepositoryPlugin} instead + */ + @Deprecated + public final void onModule(RepositoriesModule module) {} + + /** + * Old-style cluster extension point. {@code @Deprecated} and {@code final} to act as a signpost for plugin authors upgrading + * from 2.x. + * + * @deprecated implement {@link ClusterPlugin} instead + */ + @Deprecated + public final void onModule(ClusterModule module) {} + + /** + * Old-style discovery extension point. {@code @Deprecated} and {@code final} to act as a signpost for plugin authors upgrading + * from 2.x. 
+ * + * @deprecated implement {@link DiscoveryPlugin} instead + */ + @Deprecated + public final void onModule(DiscoveryModule module) {} } diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index a1bad51626c..3db9d4340ef 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -867,15 +867,17 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } private void writeAtomic(final String blobName, final BytesReference bytesRef) throws IOException { - final String tempBlobName = "pending-" + blobName; + final String tempBlobName = "pending-" + blobName + "-" + UUIDs.randomBase64UUID(); try (InputStream stream = bytesRef.streamInput()) { snapshotsBlobContainer.writeBlob(tempBlobName, stream, bytesRef.length()); - } - try { snapshotsBlobContainer.move(tempBlobName, blobName); } catch (IOException ex) { - // Move failed - try cleaning up - snapshotsBlobContainer.deleteBlob(tempBlobName); + // temporary blob creation or move failed - try cleaning up + try { + snapshotsBlobContainer.deleteBlob(tempBlobName); + } catch (IOException e) { + ex.addSuppressed(e); + } throw ex; } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index aee4eb3a9e2..fd8a56b4df1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -21,7 +21,10 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import 
org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -29,9 +32,13 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.AcknowledgedRestListener; import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; public class RestPutIndexTemplateAction extends BaseRestHandler { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(RestPutIndexTemplateAction.class)); + @Inject public RestPutIndexTemplateAction(Settings settings, RestController controller) { super(settings); @@ -42,7 +49,12 @@ public class RestPutIndexTemplateAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name")); - putRequest.template(request.param("template", putRequest.template())); + if (request.hasParam("template")) { + DEPRECATION_LOGGER.deprecated("Deprecated parameter[template] used, replaced by [index_patterns]"); + putRequest.patterns(Collections.singletonList(request.param("template"))); + } else { + putRequest.patterns(Arrays.asList(request.paramAsStringArray("index_patterns", Strings.EMPTY_ARRAY))); + } putRequest.order(request.paramAsInt("order", putRequest.order())); putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout())); putRequest.create(request.paramAsBoolean("create", false)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java 
b/core/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java index b62009512a4..486fbb93d76 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java @@ -70,7 +70,7 @@ public class RestTemplatesAction extends AbstractCatAction { Table table = new Table(); table.startHeaders(); table.addCell("name", "alias:n;desc:template name"); - table.addCell("template", "alias:t;desc:template pattern string"); + table.addCell("index_patterns", "alias:t;desc:template index patterns"); table.addCell("order", "alias:o;desc:template application order number"); table.addCell("version", "alias:v;desc:version"); table.endHeaders(); @@ -85,7 +85,7 @@ public class RestTemplatesAction extends AbstractCatAction { if (patternString == null || Regex.simpleMatch(patternString, indexData.name())) { table.startRow(); table.addCell(indexData.name()); - table.addCell(indexData.getTemplate()); + table.addCell("[" + String.join(", ", indexData.patterns()) + "]"); table.addCell(indexData.getOrder()); table.addCell(indexData.getVersion()); table.endRow(); diff --git a/core/src/main/java/org/elasticsearch/script/Script.java b/core/src/main/java/org/elasticsearch/script/Script.java index e33da6d752a..e3b7ee9a137 100644 --- a/core/src/main/java/org/elasticsearch/script/Script.java +++ b/core/src/main/java/org/elasticsearch/script/Script.java @@ -19,282 +19,600 @@ package org.elasticsearch.script; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Nullable; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.ParseFieldMatcherSupplier; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Objects; /** - * Script holds all the parameters necessary to compile or find in cache and then execute a script. + * Script represents used-defined input that can be used to + * compile and execute a script from the {@link ScriptService} + * based on the {@link ScriptType}. */ public final class Script implements ToXContent, Writeable { + /** + * The name of the of the default scripting language. + */ public static final String DEFAULT_SCRIPT_LANG = "painless"; - private String script; - private ScriptType type; - @Nullable private String lang; - @Nullable private Map params; - @Nullable private XContentType contentType; + /** + * The name of the default template language. + */ + public static final String DEFAULT_TEMPLATE_LANG = "mustache"; /** - * Constructor for simple inline script. The script will have no lang or params set. - * - * @param script The inline script to execute. + * The default {@link ScriptType}. 
*/ - public Script(String script) { - this(script, ScriptType.INLINE, null, null); - } - - public Script(String script, ScriptType type, String lang, @Nullable Map params) { - this(script, type, lang, params, null); - } + public static final ScriptType DEFAULT_SCRIPT_TYPE = ScriptType.INLINE; /** - * Constructor for Script. - * - * @param script The cache key of the script to be compiled/executed. For inline scripts this is the actual - * script source code. For indexed scripts this is the id used in the request. For on file - * scripts this is the file name. - * @param type The type of script -- dynamic, stored, or file. - * @param lang The language of the script to be compiled/executed. - * @param params The map of parameters the script will be executed with. - * @param contentType The {@link XContentType} of the script. Only relevant for inline scripts that have not been - * defined as a plain string, but as json or yaml content. This class needs this information - * when serializing the script back to xcontent. + * Compiler option for {@link XContentType} used for templates. */ - @SuppressWarnings("unchecked") - public Script(String script, ScriptType type, String lang, @Nullable Map params, - @Nullable XContentType contentType) { - if (contentType != null && type != ScriptType.INLINE) { - throw new IllegalArgumentException("The parameter contentType only makes sense for inline scripts"); + public static final String CONTENT_TYPE_OPTION = "content_type"; + + /** + * Standard {@link ParseField} for outer level of script queries. + */ + public static final ParseField SCRIPT_PARSE_FIELD = new ParseField("script"); + + /** + * Standard {@link ParseField} for lang on the inner level. + */ + public static final ParseField LANG_PARSE_FIELD = new ParseField("lang"); + + /** + * Standard {@link ParseField} for options on the inner level. 
+ */ + public static final ParseField OPTIONS_PARSE_FIELD = new ParseField("options"); + + /** + * Standard {@link ParseField} for params on the inner level. + */ + public static final ParseField PARAMS_PARSE_FIELD = new ParseField("params"); + + /** + * Unreleased version used for {@link Script} non-null members format of read/write. + */ + public static final Version V_5_1_0_UNRELEASED = Version.fromId(5010099); + + /** + * Helper class used by {@link ObjectParser} to store mutable {@link Script} variables and then + * construct an immutable {@link Script} object based on parsed XContent. + */ + private static final class Builder { + private ScriptType type; + private String lang; + private String idOrCode; + private Map options; + private Map params; + + private Builder() { + // This cannot default to an empty map because options are potentially added at multiple points. + this.options = new HashMap<>(); + this.params = Collections.emptyMap(); } - this.script = Objects.requireNonNull(script); + + /** + * Since inline scripts can accept code rather than just an id, they must also be able + * to handle template parsing, hence the need for custom parsing code. Templates can + * consist of either an {@link String} or a JSON object. If a JSON object is discovered + * then the content type option must also be saved as a compiler option. + */ + private void setInline(XContentParser parser) { + try { + if (type != null) { + throwOnlyOneOfType(); + } + + type = ScriptType.INLINE; + + if (parser.currentToken() == Token.START_OBJECT) { + XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); + idOrCode = builder.copyCurrentStructure(parser).bytes().utf8ToString(); + options.put(CONTENT_TYPE_OPTION, parser.contentType().mediaType()); + } else { + idOrCode = parser.text(); + } + } catch (IOException exception) { + throw new UncheckedIOException(exception); + } + } + + /** + * Set both the id and the type of the stored script. 
+ */ + private void setStored(String idOrCode) { + if (type != null) { + throwOnlyOneOfType(); + } + + type = ScriptType.STORED; + this.idOrCode = idOrCode; + } + + /** + * Set both the id and the type of the file script. + */ + private void setFile(String idOrCode) { + if (type != null) { + throwOnlyOneOfType(); + } + + type = ScriptType.FILE; + this.idOrCode = idOrCode; + } + + /** + * Helper method to throw an exception if more than one type of {@link Script} is specified. + */ + private void throwOnlyOneOfType() { + throw new IllegalArgumentException("must only use one of [" + + ScriptType.INLINE.getParseField().getPreferredName() + " + , " + + ScriptType.STORED.getParseField().getPreferredName() + " + , " + + ScriptType.FILE.getParseField().getPreferredName() + "]" + + " when specifying a script"); + } + + private void setLang(String lang) { + this.lang = lang; + } + + /** + * Options may have already been added if an inline template was specified. + * Appends the user-defined compiler options with the internal compiler options. + */ + private void setOptions(Map options) { + this.options.putAll(options); + } + + private void setParams(Map params) { + this.params = params; + } + + /** + * Validates the parameters and creates an {@link Script}. + * @param defaultLang The default lang is not a compile-time constant and must be provided + * at run-time this way in case a legacy default language is used from + * previously stored queries. 
+ */ + private Script build(String defaultLang) { + if (type == null) { + throw new IllegalArgumentException( + "must specify either code for an [" + ScriptType.INLINE.getParseField().getPreferredName() + "] script " + + "or an id for a [" + ScriptType.STORED.getParseField().getPreferredName() + "] script " + + "or [" + ScriptType.FILE.getParseField().getPreferredName() + "] script"); + } + + if (idOrCode == null) { + throw new IllegalArgumentException("must specify an id or code for a script"); + } + + if (options.size() > 1 || options.size() == 1 && options.get(CONTENT_TYPE_OPTION) == null) { + throw new IllegalArgumentException("illegal compiler options [" + options + "] specified"); + } + + return new Script(type, this.lang == null ? defaultLang : this.lang, idOrCode, options, params); + } + } + + private static final ObjectParser PARSER = new ObjectParser<>("script", Builder::new); + + static { + // Defines the fields necessary to parse a Script as XContent using an ObjectParser. + PARSER.declareField(Builder::setInline, parser -> parser, ScriptType.INLINE.getParseField(), ValueType.OBJECT_OR_STRING); + PARSER.declareString(Builder::setStored, ScriptType.STORED.getParseField()); + PARSER.declareString(Builder::setFile, ScriptType.FILE.getParseField()); + PARSER.declareString(Builder::setLang, LANG_PARSE_FIELD); + PARSER.declareField(Builder::setOptions, XContentParser::mapStrings, OPTIONS_PARSE_FIELD, ValueType.OBJECT); + PARSER.declareField(Builder::setParams, XContentParser::map, PARAMS_PARSE_FIELD, ValueType.OBJECT); + } + + /** + * Convenience method to call {@link Script#parse(XContentParser, ParseFieldMatcher, String)} + * using the default scripting language. 
+ */ + public static Script parse(XContentParser parser, ParseFieldMatcher matcher) throws IOException { + return parse(parser, matcher, DEFAULT_SCRIPT_LANG); + } + + /** + * Convenience method to call {@link Script#parse(XContentParser, ParseFieldMatcher, String)} using the + * {@link ParseFieldMatcher} and scripting language provided by the {@link QueryParseContext}. + */ + public static Script parse(XContentParser parser, QueryParseContext context) throws IOException { + return parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage()); + } + + /** + * This will parse XContent into a {@link Script}. The following formats can be parsed: + * + * The simple format defaults to an {@link ScriptType#INLINE} with no compiler options or user-defined params: + * + * Example: + * {@code + * "return Math.log(doc.popularity) * 100;" + * } + * + * The complex format where {@link ScriptType} and idOrCode are required while lang, options and params are not required. + * + * {@code + * { + * "" : "", + * "lang" : "", + * "options" : { + * "option0" : "", + * "option1" : "", + * ... + * }, + * "params" : { + * "param0" : "", + * "param1" : "", + * ... + * } + * } + * } + * + * Example: + * {@code + * { + * "inline" : "return Math.log(doc.popularity) * params.multiplier", + * "lang" : "painless", + * "params" : { + * "multiplier" : 100.0 + * } + * } + * } + * + * This also handles templates in a special way. If a complexly formatted query is specified as another complex + * JSON object the query is assumed to be a template, and the format will be preserved. + * + * {@code + * { + * "inline" : { "query" : ... }, + * "lang" : "", + * "options" : { + * "option0" : "", + * "option1" : "", + * ... + * }, + * "params" : { + * "param0" : "", + * "param1" : "", + * ... + * } + * } + * } + * + * @param parser The {@link XContentParser} to be used. + * @param matcher The {@link ParseFieldMatcher} to be used. 
+ * @param defaultLang The default language to use if no language is specified. The default language isn't necessarily + * the one defined by {@link Script#DEFAULT_SCRIPT_LANG} due to backwards compatibility requirements + * related to stored queries using previously default languages. + * @return The parsed {@link Script}. + */ + public static Script parse(XContentParser parser, ParseFieldMatcher matcher, String defaultLang) throws IOException { + Objects.requireNonNull(defaultLang); + + Token token = parser.currentToken(); + + if (token == null) { + token = parser.nextToken(); + } + + if (token == Token.VALUE_STRING) { + return new Script(ScriptType.INLINE, defaultLang, parser.text(), Collections.emptyMap()); + } + + return PARSER.apply(parser, () -> matcher).build(defaultLang); + } + + private final ScriptType type; + private final String lang; + private final String idOrCode; + private final Map options; + private final Map params; + + /** + * Constructor for simple script using the default language and default type. + * @param idOrCode The id or code to use dependent on the default script type. + */ + public Script(String idOrCode) { + this(DEFAULT_SCRIPT_TYPE, DEFAULT_SCRIPT_LANG, idOrCode, Collections.emptyMap(), Collections.emptyMap()); + } + + /** + * Constructor for a script that does not need to use compiler options. + * @param type The {@link ScriptType}. + * @param lang The lang for this {@link Script}. + * @param idOrCode The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#FILE} or {@link ScriptType#STORED}. + * The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}. + * @param params The user-defined params to be bound for script execution. + */ + public Script(ScriptType type, String lang, String idOrCode, Map params) { + this(type, lang, idOrCode, Collections.emptyMap(), params); + } + + /** + * Constructor for a script that requires the use of compiler options. 
+ * @param type The {@link ScriptType}. + * @param lang The lang for this {@link Script}. + * @param idOrCode The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#FILE} or {@link ScriptType#STORED}. + * The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}. + * @param options The options to be passed to the compiler for use at compile-time. + * @param params The user-defined params to be bound for script execution. + */ + public Script(ScriptType type, String lang, String idOrCode, Map options, Map params) { + this.idOrCode = Objects.requireNonNull(idOrCode); this.type = Objects.requireNonNull(type); - this.lang = lang == null ? DEFAULT_SCRIPT_LANG : lang; - this.params = (Map) params; - this.contentType = contentType; + this.lang = Objects.requireNonNull(lang); + this.options = Collections.unmodifiableMap(Objects.requireNonNull(options)); + this.params = Collections.unmodifiableMap(Objects.requireNonNull(params)); + + if (type != ScriptType.INLINE && !options.isEmpty()) { + throw new IllegalArgumentException( + "Compiler options [" + options + "] cannot be specified at runtime for [" + type + "] scripts."); + } } + /** + * Creates a {@link Script} read from an input stream. + */ public Script(StreamInput in) throws IOException { - script = in.readString(); - if (in.readBoolean()) { - type = ScriptType.readFrom(in); - } - lang = in.readOptionalString(); - params = in.readMap(); - if (in.readBoolean()) { - contentType = XContentType.readFrom(in); + // Version 5.1+ requires all Script members to be non-null and supports the potential + // for more options than just XContentType. Reorders the read in contents to be in + // same order as the constructor. 
+ if (in.getVersion().onOrAfter(V_5_1_0_UNRELEASED)) { + this.type = ScriptType.readFrom(in); + this.lang = in.readString(); + this.idOrCode = in.readString(); + @SuppressWarnings("unchecked") + Map options = (Map)in.readMap(); + this.options = options; + this.params = in.readMap(); + // Prior to version 5.1 the script members are read in certain cases as optional and given + // default values when necessary. Also the only option supported is for XContentType. + } else { + String idOrCode = in.readString(); + ScriptType type; + + if (in.readBoolean()) { + type = ScriptType.readFrom(in); + } else { + type = DEFAULT_SCRIPT_TYPE; + } + + String lang = in.readOptionalString(); + + if (lang == null) { + lang = DEFAULT_SCRIPT_LANG; + } + + Map params = in.readMap(); + + if (params == null) { + params = new HashMap<>(); + } + + Map options = new HashMap<>(); + + if (in.readBoolean()) { + XContentType contentType = XContentType.readFrom(in); + options.put(CONTENT_TYPE_OPTION, contentType.mediaType()); + } + + this.type = type; + this.lang = lang; + this.idOrCode = idOrCode; + this.options = options; + this.params = params; } } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(script); - boolean hasType = type != null; - out.writeBoolean(hasType); - if (hasType) { + // Version 5.1+ requires all Script members to be non-null and supports the potential + // for more options than just XContentType. Reorders the written out contents to be in + // same order as the constructor. 
+ if (out.getVersion().onOrAfter(V_5_1_0_UNRELEASED)) { type.writeTo(out); - } - out.writeOptionalString(lang); - out.writeMap(params); - boolean hasContentType = contentType != null; - out.writeBoolean(hasContentType); - if (hasContentType) { - XContentType.writeTo(contentType, out); + out.writeString(lang); + out.writeString(idOrCode); + @SuppressWarnings("unchecked") + Map options = (Map)this.options; + out.writeMap(options); + out.writeMap(params); + // Prior to version 5.1 the Script members were possibly written as optional or null, though this is no longer + // necessary since Script members cannot be null anymore, and there is no case where a null value wasn't equivalent + // to its default value when actually compiling/executing a script. Meaning, there are no backwards compatibility issues, + // and now there's enforced consistency. Also the only supported compiler option was XContentType. + } else { + out.writeString(idOrCode); + out.writeBoolean(true); + type.writeTo(out); + out.writeBoolean(true); + out.writeString(lang); + out.writeMap(params.isEmpty() ? null : params); + + if (options.containsKey(CONTENT_TYPE_OPTION)) { + XContentType contentType = XContentType.fromMediaTypeOrFormat(options.get(CONTENT_TYPE_OPTION)); + out.writeBoolean(true); + contentType.writeTo(out); + } else { + out.writeBoolean(false); + } } } /** - * Method for getting the script. - * @return The cache key of the script to be compiled/executed. For dynamic scripts this is the actual - * script source code. For indexed scripts this is the id used in the request. For on disk scripts - * this is the file name. - */ - public String getScript() { - return script; - } - - /** - * Method for getting the type. + * This will build scripts into the following XContent structure: * - * @return The type of script -- inline, stored, or file. + * {@code + * { + * "" : "", + * "lang" : "", + * "options" : { + * "option0" : "", + * "option1" : "", + * ... 
+ * }, + * "params" : { + * "param0" : "", + * "param1" : "", + * ... + * } + * } + * } + * + * Example: + * {@code + * { + * "inline" : "return Math.log(doc.popularity) * params.multiplier;", + * "lang" : "painless", + * "params" : { + * "multiplier" : 100.0 + * } + * } + * } + * + * Note that options and params will only be included if there have been any specified. + * + * This also handles templates in a special way. If the {@link Script#CONTENT_TYPE_OPTION} option + * is provided and the {@link ScriptType#INLINE} is specified then the template will be preserved as a raw field. + * + * {@code + * { + * "inline" : { "query" : ... }, + * "lang" : "", + * "options" : { + * "option0" : "", + * "option1" : "", + * ... + * }, + * "params" : { + * "param0" : "", + * "param1" : "", + * ... + * } + * } + * } + */ + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException { + builder.startObject(); + + String contentType = options.get(CONTENT_TYPE_OPTION); + + if (type == ScriptType.INLINE && contentType != null && builder.contentType().mediaType().equals(contentType)) { + builder.rawField(type.getParseField().getPreferredName(), new BytesArray(idOrCode)); + } else { + builder.field(type.getParseField().getPreferredName(), idOrCode); + } + + builder.field(LANG_PARSE_FIELD.getPreferredName(), lang); + + if (!options.isEmpty()) { + builder.field(OPTIONS_PARSE_FIELD.getPreferredName(), options); + } + + if (!params.isEmpty()) { + builder.field(PARAMS_PARSE_FIELD.getPreferredName(), params); + } + + builder.endObject(); + + return builder; + } + + /** + * @return The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#FILE} or {@link ScriptType#STORED}. + * The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}. + */ + public String getIdOrCode() { + return idOrCode; + } + + /** + * @return The {@link ScriptType} for this {@link Script}. 
*/ public ScriptType getType() { return type; } /** - * Method for getting language. - * - * @return The language of the script to be compiled/executed. + * @return The language for this {@link Script}. */ public String getLang() { return lang; } /** - * Method for getting the parameters. - * - * @return The map of parameters the script will be executed with. + * @return The map of compiler options for this {@link Script}. + */ + public Map getOptions() { + return options; + } + + /** + * @return The map of user-defined params for this {@link Script}. */ public Map getParams() { return params; } - /** - * @return The content type of the script if it is an inline script and the script has been defined as json - * or yaml content instead of a plain string. - */ - public XContentType getContentType() { - return contentType; - } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException { - if (type == null) { - return builder.value(script); - } - builder.startObject(); - if (type == ScriptType.INLINE && contentType != null && builder.contentType() == contentType) { - builder.rawField(type.getParseField().getPreferredName(), new BytesArray(script)); - } else { - builder.field(type.getParseField().getPreferredName(), script); - } - if (lang != null) { - builder.field(ScriptField.LANG.getPreferredName(), lang); - } - if (params != null) { - builder.field(ScriptField.PARAMS.getPreferredName(), params); - } - builder.endObject(); - return builder; - } + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - public static Script parse(XContentParser parser, ParseFieldMatcher matcher) { - return parse(parser, matcher, null); - } + Script script = (Script)o; - public static Script parse(XContentParser parser, QueryParseContext context) { - return parse(parser, context.getParseFieldMatcher(), null); - } - - public static Script parse(XContentParser 
parser, ParseFieldMatcher parseFieldMatcher, @Nullable String lang) { - try { - XContentParser.Token token = parser.currentToken(); - // If the parser hasn't yet been pushed to the first token, do it now - if (token == null) { - token = parser.nextToken(); - } - if (token == XContentParser.Token.VALUE_STRING) { - return new Script(parser.text(), ScriptType.INLINE, lang, null); - } - if (token != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchParseException("expected a string value or an object, but found [{}] instead", token); - } - String script = null; - ScriptType type = null; - Map params = null; - XContentType contentType = null; - String cfn = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - cfn = parser.currentName(); - } else if (parseFieldMatcher.match(cfn, ScriptType.INLINE.getParseField())) { - type = ScriptType.INLINE; - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - contentType = parser.contentType(); - XContentBuilder builder = XContentFactory.contentBuilder(contentType); - script = builder.copyCurrentStructure(parser).bytes().utf8ToString(); - } else { - script = parser.text(); - } - } else if (parseFieldMatcher.match(cfn, ScriptType.FILE.getParseField())) { - type = ScriptType.FILE; - if (token == XContentParser.Token.VALUE_STRING) { - script = parser.text(); - } else { - throw new ElasticsearchParseException("expected a string value for field [{}], but found [{}]", cfn, token); - } - } else if (parseFieldMatcher.match(cfn, ScriptType.STORED.getParseField())) { - type = ScriptType.STORED; - if (token == XContentParser.Token.VALUE_STRING) { - script = parser.text(); - } else { - throw new ElasticsearchParseException("expected a string value for field [{}], but found [{}]", cfn, token); - } - } else if (parseFieldMatcher.match(cfn, ScriptField.LANG)) { - if (token == XContentParser.Token.VALUE_STRING) { - lang = parser.text(); - } 
else { - throw new ElasticsearchParseException("expected a string value for field [{}], but found [{}]", cfn, token); - } - } else if (parseFieldMatcher.match(cfn, ScriptField.PARAMS)) { - if (token == XContentParser.Token.START_OBJECT) { - params = parser.map(); - } else { - throw new ElasticsearchParseException("expected an object for field [{}], but found [{}]", cfn, token); - } - } else { - throw new ElasticsearchParseException("unexpected field [{}]", cfn); - } - } - if (script == null) { - throw new ElasticsearchParseException("expected one of [{}], [{}] or [{}] fields, but found none", - ScriptType.INLINE.getParseField() .getPreferredName(), ScriptType.FILE.getParseField().getPreferredName(), - ScriptType.STORED.getParseField() .getPreferredName()); - } - return new Script(script, type, lang, params, contentType); - } catch (IOException e) { - throw new ParsingException(parser.getTokenLocation(), "Error parsing [" + ScriptField.SCRIPT.getPreferredName() + "] field", e); - } + if (type != script.type) return false; + if (!lang.equals(script.lang)) return false; + if (!idOrCode.equals(script.idOrCode)) return false; + if (!options.equals(script.options)) return false; + return params.equals(script.params); } @Override public int hashCode() { - return Objects.hash(lang, params, script, type, contentType); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - Script other = (Script) obj; - - return Objects.equals(lang, other.lang) && - Objects.equals(params, other.params) && - Objects.equals(script, other.script) && - Objects.equals(type, other.type) && - Objects.equals(contentType, other.contentType); + int result = type.hashCode(); + result = 31 * result + lang.hashCode(); + result = 31 * result + idOrCode.hashCode(); + result = 31 * result + options.hashCode(); + result = 31 * result + params.hashCode(); + return result; } @Override public 
String toString() { - return "[script: " + script + ", type: " + type.getParseField().getPreferredName() + ", lang: " - + lang + ", params: " + params + ", contentType: " + contentType + "]"; + return "Script{" + + "type=" + type + + ", lang='" + lang + '\'' + + ", idOrCode='" + idOrCode + '\'' + + ", options=" + options + + ", params=" + params + + '}'; } - - public interface ScriptField { - ParseField SCRIPT = new ParseField("script"); - ParseField LANG = new ParseField("lang"); - ParseField PARAMS = new ParseField("params"); - } - } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index f38a213123e..1dc1cda0ada 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -274,9 +274,9 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust String lang = script.getLang(); ScriptType type = script.getType(); - //script.getScript() could return either a name or code for a script, + //script.getIdOrCode() could return either a name or code for a script, //but we check for a file script name first and an indexed script name second - String name = script.getScript(); + String name = script.getIdOrCode(); if (logger.isTraceEnabled()) { logger.trace("Compiling lang: [{}] type: [{}] script: {}", lang, type, name); @@ -296,8 +296,8 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust return compiledScript; } - //script.getScript() will be code if the script type is inline - String code = script.getScript(); + //script.getIdOrCode() will be code if the script type is inline + String code = script.getIdOrCode(); if (type == ScriptType.STORED) { //The look up for an indexed script must be done every time in case @@ -468,22 +468,22 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust /** * Compiles (or retrieves 
from cache) and executes the provided script */ - public ExecutableScript executable(Script script, ScriptContext scriptContext, Map params) { - return executable(compile(script, scriptContext, params), script.getParams()); + public ExecutableScript executable(Script script, ScriptContext scriptContext) { + return executable(compile(script, scriptContext, script.getOptions()), script.getParams()); } /** * Executes a previously compiled script provided as an argument */ - public ExecutableScript executable(CompiledScript compiledScript, Map vars) { - return getScriptEngineServiceForLang(compiledScript.lang()).executable(compiledScript, vars); + public ExecutableScript executable(CompiledScript compiledScript, Map params) { + return getScriptEngineServiceForLang(compiledScript.lang()).executable(compiledScript, params); } /** * Compiles (or retrieves from cache) and executes the provided search script */ - public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext, Map params) { - CompiledScript compiledScript = compile(script, scriptContext, params); + public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext) { + CompiledScript compiledScript = compile(script, scriptContext, script.getOptions()); return search(lookup, compiledScript, script.getParams()); } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index d6ef1e0c54d..9666df8cc56 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -769,8 +769,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } if (source.scriptFields() != null) { for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) { - SearchScript searchScript = scriptService.search(context.lookup(), field.script(), 
ScriptContext.Standard.SEARCH, - Collections.emptyMap()); + SearchScript searchScript = scriptService.search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH); context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure())); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index b7635d3dc32..df25f0e2635 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.search.aggregations; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; @@ -69,12 +68,10 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, Na private final BigArrays bigArrays; private final ScriptService scriptService; - private final ClusterState clusterState; - public ReduceContext(BigArrays bigArrays, ScriptService scriptService, ClusterState clusterState) { + public ReduceContext(BigArrays bigArrays, ScriptService scriptService) { this.bigArrays = bigArrays; this.scriptService = scriptService; - this.clusterState = clusterState; } public BigArrays bigArrays() { @@ -84,10 +81,6 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, Na public ScriptService scriptService() { return scriptService; } - - public ClusterState clusterState() { - return clusterState; - } } protected final String name; @@ -126,7 +119,6 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, Na protected abstract void doWriteTo(StreamOutput out) throws IOException; - @Override public String getName() { return name; @@ -215,5 +207,4 @@ public abstract 
class InternalAggregation implements Aggregation, ToXContent, Na public static final String TO = "to"; public static final String TO_AS_STRING = "to_as_string"; } - } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java index 748adb67ce5..9519a95d3a5 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java @@ -29,21 +29,19 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.support.XContentParseContext; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; -import java.util.Collections; import java.util.Objects; public class ScriptHeuristic extends SignificanceHeuristic { public static final String NAME = "script_heuristic"; private final Script script; - + // This class holds an executable form of the script with private variables ready for execution // on a single search thread. 
static class ExecutableScriptHeuristic extends ScriptHeuristic { @@ -72,7 +70,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { supersetSizeHolder.value = supersetSize; subsetDfHolder.value = subsetFreq; supersetDfHolder.value = supersetFreq; - return ((Number) executableScript.run()).doubleValue(); + return ((Number) executableScript.run()).doubleValue(); } } @@ -94,12 +92,12 @@ public class ScriptHeuristic extends SignificanceHeuristic { @Override public SignificanceHeuristic rewrite(InternalAggregation.ReduceContext context) { - return new ExecutableScriptHeuristic(script, context.scriptService().executable(script, ScriptContext.Standard.AGGS, Collections.emptyMap())); + return new ExecutableScriptHeuristic(script, context.scriptService().executable(script, ScriptContext.Standard.AGGS)); } @Override public SignificanceHeuristic rewrite(SearchContext context) { - return new ExecutableScriptHeuristic(script, context.getQueryShardContext().getExecutableScript(script, ScriptContext.Standard.AGGS, Collections.emptyMap())); + return new ExecutableScriptHeuristic(script, context.getQueryShardContext().getExecutableScript(script, ScriptContext.Standard.AGGS)); } @@ -125,7 +123,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { @Override public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException { builder.startObject(NAME); - builder.field(ScriptField.SCRIPT.getPreferredName()); + builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName()); script.toXContent(builder, builderParams); builder.endObject(); return builder; @@ -159,7 +157,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { if (token.equals(XContentParser.Token.FIELD_NAME)) { currentFieldName = parser.currentName(); } else { - if (context.matchField(currentFieldName, ScriptField.SCRIPT)) { + if (context.matchField(currentFieldName, Script.SCRIPT_PARSE_FIELD)) { script = Script.parse(parser, context.getParseFieldMatcher(), 
context.getDefaultScriptLanguage()); } else { throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java index f044e94f05b..6e48d844c7d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java @@ -191,17 +191,15 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder QueryShardContext queryShardContext = context.searchContext().getQueryShardContext(); Function, ExecutableScript> executableInitScript; if (initScript != null) { - executableInitScript = queryShardContext.getLazyExecutableScript(initScript, ScriptContext.Standard.AGGS, - Collections.emptyMap()); + executableInitScript = queryShardContext.getLazyExecutableScript(initScript, ScriptContext.Standard.AGGS); } else { executableInitScript = (p) -> null;; } Function, SearchScript> searchMapScript = queryShardContext.getLazySearchScript(mapScript, - ScriptContext.Standard.AGGS, Collections.emptyMap()); + ScriptContext.Standard.AGGS); Function, ExecutableScript> executableCombineScript; if (combineScript != null) { - executableCombineScript = queryShardContext.getLazyExecutableScript(combineScript, ScriptContext.Standard.AGGS, - Collections.emptyMap()); + executableCombineScript = queryShardContext.getLazyExecutableScript(combineScript, ScriptContext.Standard.AGGS); } else { executableCombineScript = (p) -> null; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java index d6150fa8743..7cb74b9ecb7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java @@ -91,7 +91,7 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory map = parser.map(); @@ -223,7 +222,7 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr } if (script == null) { - throw new ParsingException(parser.getTokenLocation(), "Missing required field [" + ScriptField.SCRIPT.getPreferredName() + throw new ParsingException(parser.getTokenLocation(), "Missing required field [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() + "] for series_arithmetic aggregation [" + reducerName + "]"); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregationBuilder.java index e3b42376728..877be6ea54f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregationBuilder.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; -import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -119,7 +118,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg @Override protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { builder.field(BUCKETS_PATH.getPreferredName(), bucketsPathsMap); - builder.field(ScriptField.SCRIPT.getPreferredName(), script); + builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script); builder.field(GAP_POLICY.getPreferredName(), gapPolicy.getName()); return builder; } @@ -141,7 +140,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg bucketsPathsMap.put("_value", parser.text()); } else if (context.getParseFieldMatcher().match(currentFieldName, GAP_POLICY)) { gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation()); - } else if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) { + } else if (context.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) { script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage()); } else { throw new ParsingException(parser.getTokenLocation(), @@ -163,7 +162,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."); } } else if (token == XContentParser.Token.START_OBJECT) { - if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) { + if (context.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) { script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage()); } else if (context.getParseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) { Map map = parser.map(); @@ -186,7 +185,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg } if (script == null) { - throw new 
ParsingException(parser.getTokenLocation(), "Missing required field [" + ScriptField.SCRIPT.getPreferredName() + throw new ParsingException(parser.getTokenLocation(), "Missing required field [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() + "] for bucket_selector aggregation [" + reducerName + "]"); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java index e2fd8451ae6..7e8c5c1b271 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; -import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.search.aggregations.Aggregator; import org.joda.time.DateTimeZone; @@ -133,7 +132,7 @@ public abstract class AbstractValuesSourceParser "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); } } else if (scriptable && token == XContentParser.Token.START_OBJECT) { - if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) { + if (context.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) { script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage()); } else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) { throw new ParsingException(parser.getTokenLocation(), diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java 
index f539a2a3b7f..1c06296f38a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java @@ -376,7 +376,7 @@ public abstract class ValuesSourceAggregationBuilder fields = hitContext.hit().fieldsOrNull(); if (fields == null) { fields = new HashMap<>(); @@ -59,8 +64,7 @@ public final class ParentFieldSubFetchPhase implements FetchSubPhase { try { SortedDocValues docValues = reader.getSortedDocValues(fieldMapper.name()); BytesRef parentId = docValues.get(docId); - assert parentId.length > 0; - return parentId.utf8ToString(); + return parentId.length > 0 ? parentId.utf8ToString() : null; } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index 072b34d83de..e37446ba8ce 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -200,6 +200,10 @@ public class InternalSearchHit implements SearchHit { */ @Override public BytesReference sourceRef() { + if (this.source == null) { + return null; + } + try { this.source = CompressorFactory.uncompressIfNeeded(this.source); return this.source; @@ -245,7 +249,7 @@ public class InternalSearchHit implements SearchHit { @Override public boolean hasSource() { - return source == null; + return source != null; } @Override diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index bce30e844b5..6da93b26b8c 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -46,14 
+46,12 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; -import java.util.Collections; import java.util.Locale; import java.util.Objects; @@ -218,7 +216,7 @@ public class ScriptSortBuilder extends SortBuilder { a -> new ScriptSortBuilder((Script) a[0], (ScriptSortType) a[1])); static { - PARSER.declareField(constructorArg(), Script::parse, ScriptField.SCRIPT, ValueType.OBJECT_OR_STRING); + PARSER.declareField(constructorArg(), Script::parse, Script.SCRIPT_PARSE_FIELD, ValueType.OBJECT_OR_STRING); PARSER.declareField(constructorArg(), p -> ScriptSortType.fromString(p.text()), TYPE_FIELD, ValueType.STRING); PARSER.declareString((b, v) -> b.order(SortOrder.fromString(v)), ORDER_FIELD); PARSER.declareString((b, v) -> b.sortMode(SortMode.fromString(v)), SORTMODE_FIELD); @@ -242,7 +240,7 @@ public class ScriptSortBuilder extends SortBuilder { @Override public SortFieldAndFormat build(QueryShardContext context) throws IOException { - final SearchScript searchScript = context.getSearchScript(script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); + final SearchScript searchScript = context.getSearchScript(script, ScriptContext.Standard.SEARCH); MultiValueMode valueMode = null; if (sortMode != null) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index 445924680b1..969b1c24d5c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -393,7 +393,7 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder, ExecutableScript> compiledScript = context.getLazyExecutableScript(this.collateQuery, - ScriptContext.Standard.SEARCH, Collections.emptyMap()); + ScriptContext.Standard.SEARCH); suggestionContext.setCollateQueryScript(compiledScript); if (this.collateParams != null) { suggestionContext.setCollateScriptParams(this.collateParams); diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java index 9e4e0262080..086c48c4114 100644 --- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -1231,8 +1231,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i } streamIn = compressor.streamInput(streamIn); } - if (version.onOrAfter(getCurrentVersion().minimumCompatibilityVersion()) == false - || version.major != getCurrentVersion().major) { + if (version.isCompatible(getCurrentVersion()) == false) { throw new IllegalStateException("Received message from unsupported version: [" + version + "] minimal compatible version is: [" + getCurrentVersion().minimumCompatibilityVersion() + "]"); } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 9ec3dc6b7fe..7b1c83d66aa 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -371,18 +371,13 @@ public class TransportService extends AbstractLifecycleComponent { if (checkClusterName && !Objects.equals(clusterName, response.clusterName)) { throw new IllegalStateException("handshake failed, mismatched cluster name [" + response.clusterName + "] - " + node); - } else if 
(!isVersionCompatible(response.version)) { + } else if (response.version.isCompatible((localNode != null ? localNode.getVersion() : Version.CURRENT)) == false) { throw new IllegalStateException("handshake failed, incompatible version [" + response.version + "] - " + node); } return response.discoveryNode; } - private boolean isVersionCompatible(Version version) { - return version.minimumCompatibilityVersion().equals( - localNode != null ? localNode.getVersion().minimumCompatibilityVersion() : Version.CURRENT.minimumCompatibilityVersion()); - } - static class HandshakeRequest extends TransportRequest { public static final HandshakeRequest INSTANCE = new HandshakeRequest(); @@ -955,6 +950,7 @@ public class TransportService extends AbstractLifecycleComponent { * are invoked we restore the context. */ private static final class ContextRestoreResponseHandler implements TransportResponseHandler { + private final TransportResponseHandler delegate; private final ThreadContext.StoredContext threadContext; @@ -984,6 +980,12 @@ public class TransportService extends AbstractLifecycleComponent { public String executor() { return delegate.executor(); } + + @Override + public String toString() { + return getClass().getName() + "/" + delegate.toString(); + } + } static class DirectResponseChannel implements TransportChannel { diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 999f036d9f4..1fa2043d547 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.1.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. 
-grant codeBase "${codebase.lucene-core-6.3.0-snapshot-a66a445.jar}" { +grant codeBase "${codebase.lucene-core-6.3.0.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; @@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-6.3.0-snapshot-a66a445.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.lucene-misc-6.3.0-snapshot-a66a445.jar}" { +grant codeBase "${codebase.lucene-misc-6.3.0.jar}" { // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper permission java.nio.file.LinkPermission "hard"; }; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 1c780f96933..9492b72d030 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-6.3.0-snapshot-a66a445.jar}" { +grant codeBase "${codebase.lucene-test-framework-6.3.0.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed for testing hardlinks in StoreRecoveryTests since we install MockFS diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 8a2e965a7b4..7c8b9c9f2a6 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -57,7 +57,6 @@ import org.elasticsearch.index.shard.IllegalIndexShardStateException; import 
org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.TranslogRecoveryPerformer; -import org.elasticsearch.indices.IndexTemplateAlreadyExistsException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.indices.recovery.RecoverFilesRecoveryException; @@ -336,16 +335,6 @@ public class ExceptionSerializationTests extends ESTestCase { assertTrue(ex.getCause() instanceof NullPointerException); } - public void testIndexTemplateAlreadyExistsException() throws IOException { - IndexTemplateAlreadyExistsException ex = serialize(new IndexTemplateAlreadyExistsException("the dude abides!")); - assertEquals("the dude abides!", ex.name()); - assertEquals("index_template [the dude abides!] already exists", ex.getMessage()); - - ex = serialize(new IndexTemplateAlreadyExistsException((String) null)); - assertNull(ex.name()); - assertEquals("index_template [null] already exists", ex.getMessage()); - } - public void testBatchOperationException() throws IOException { ShardId id = new ShardId("foo", "_na_", 1); TranslogRecoveryPerformer.BatchOperationException ex = serialize( @@ -683,11 +672,11 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(44, org.elasticsearch.indices.recovery.RecoveryFailedException.class); ids.put(45, org.elasticsearch.index.shard.IndexShardRelocatedException.class); ids.put(46, org.elasticsearch.transport.NodeShouldNotConnectException.class); - ids.put(47, org.elasticsearch.indices.IndexTemplateAlreadyExistsException.class); + ids.put(47, null); ids.put(48, org.elasticsearch.index.translog.TranslogCorruptedException.class); ids.put(49, org.elasticsearch.cluster.block.ClusterBlockException.class); ids.put(50, org.elasticsearch.search.fetch.FetchPhaseExecutionException.class); - ids.put(51, org.elasticsearch.index.IndexShardAlreadyExistsException.class); + 
ids.put(51, null); ids.put(52, org.elasticsearch.index.engine.VersionConflictEngineException.class); ids.put(53, org.elasticsearch.index.engine.EngineException.class); ids.put(54, null); // was DocumentAlreadyExistsException, which is superseded with VersionConflictEngineException diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index 167a36a96d1..f8c31ee3189 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -20,12 +20,15 @@ package org.elasticsearch; import org.elasticsearch.action.ShardValidateQueryRequestTests; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.monitor.os.OsStats; import org.elasticsearch.index.query.SimpleQueryStringBuilder; +import org.elasticsearch.script.Script; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -131,6 +134,10 @@ public class VersionTests extends ESTestCase { assertThat(Version.V_2_2_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0)); assertThat(Version.V_2_3_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0)); assertThat(Version.V_5_0_0_alpha1.minimumCompatibilityVersion(), equalTo(Version.V_5_0_0_alpha1)); + // from 6.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 
5.x is + // released since we need to bump the supported minor in Version#minimumCompatibilityVersion() + assertThat("did you miss to bump the minor in Version#minimumCompatibilityVersion()", + Version.V_6_0_0_alpha1.minimumCompatibilityVersion(), equalTo(VersionUtils.getPreviousVersion(Version.V_6_0_0_alpha1))); } public void testToString() { @@ -219,7 +226,7 @@ public class VersionTests extends ESTestCase { assertTrue(constantName + " should be final", Modifier.isFinal(versionConstant.getModifiers())); Version v = (Version) versionConstant.get(Version.class); - logger.info("Checking {}", v); + logger.debug("Checking {}", v); assertEquals("Version id " + field.getName() + " does not point to " + constantName, v, Version.fromId(versionId)); assertEquals("Version " + constantName + " does not have correct id", versionId, v.id); if (v.major >= 2) { @@ -275,8 +282,10 @@ public class VersionTests extends ESTestCase { expectThrows(AssertionError.class, () -> assertUnknownVersion(Version.CURRENT)); assertUnknownVersion(AliasFilter.V_5_1_0); // once we released 5.1.0 and it's added to Version.java we need to remove this constant assertUnknownVersion(OsStats.V_5_1_0); // once we released 5.1.0 and it's added to Version.java we need to remove this constant - assertUnknownVersion(SimpleQueryStringBuilder.V_5_1_0_UNRELEASED); assertUnknownVersion(QueryStringQueryBuilder.V_5_1_0_UNRELEASED); + assertUnknownVersion(SimpleQueryStringBuilder.V_5_1_0_UNRELEASED); + // once we released 5.0.0 and it's added to Version.java we need to remove this constant + assertUnknownVersion(Script.V_5_1_0_UNRELEASED); // once we released 5.0.0 and it's added to Version.java we need to remove this constant } @@ -284,4 +293,18 @@ public class VersionTests extends ESTestCase { assertFalse("Version " + version + " has been releaed don't use a new instance of this version", VersionUtils.allVersions().contains(version)); } + + public void testIsCompatible() { + assertTrue(isCompatible(Version.CURRENT, 
Version.CURRENT.minimumCompatibilityVersion())); + assertTrue(isCompatible(Version.V_5_0_0, Version.V_6_0_0_alpha1)); + assertFalse(isCompatible(Version.V_2_0_0, Version.V_6_0_0_alpha1)); + assertFalse(isCompatible(Version.V_2_0_0, Version.V_5_0_0)); + } + + public boolean isCompatible(Version left, Version right) { + boolean result = left.isCompatible(right); + assert result == right.isCompatible(left); + return result; + } + } diff --git a/core/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/core/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 2c17cea5ef5..a0437f05c4c 100644 --- a/core/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/core/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -52,7 +52,7 @@ public class ActionModuleTests extends ESTestCase { public void testPluginCantOverwriteBuiltinAction() { ActionPlugin dupsMainAction = new ActionPlugin() { @Override - public List, ? extends ActionResponse>> getActions() { + public List> getActions() { return singletonList(new ActionHandler<>(MainAction.INSTANCE, TransportMainAction.class)); } }; @@ -61,7 +61,7 @@ public class ActionModuleTests extends ESTestCase { } public void testPluginCanRegisterAction() { - class FakeRequest extends ActionRequest { + class FakeRequest extends ActionRequest { @Override public ActionRequestValidationException validate() { return null; @@ -90,7 +90,7 @@ public class ActionModuleTests extends ESTestCase { FakeAction action = new FakeAction(); ActionPlugin registersFakeAction = new ActionPlugin() { @Override - public List, ? 
extends ActionResponse>> getActions() { + public List> getActions() { return singletonList(new ActionHandler<>(action, FakeTransportAction.class)); } }; diff --git a/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 75ba6f2b11c..e4a6eef33eb 100644 --- a/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -259,7 +259,7 @@ public class IndicesRequestIT extends ESIntegTestCase { String indexOrAlias = randomIndexOrAlias(); client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get(); UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id") - .script(new Script("ctx.op='delete'", ScriptType.INLINE, CustomScriptPlugin.NAME, Collections.emptyMap())); + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op='delete'", Collections.emptyMap())); UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult()); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 340d7199234..28aff0d3bda 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -70,7 +70,7 @@ import static org.elasticsearch.test.ESTestCase.awaitBusy; public class TestTaskPlugin extends Plugin implements ActionPlugin { @Override - public List, ? 
extends ActionResponse>> getActions() { + public List> getActions() { return Arrays.asList(new ActionHandler<>(TestTaskAction.INSTANCE, TransportTestTaskAction.class), new ActionHandler<>(UnblockTestTasksAction.INSTANCE, TransportUnblockTestTasksAction.class)); } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index c8133ba7ba8..e1205840976 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -58,7 +58,8 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; */ public class ClusterRerouteRequestTests extends ESTestCase { private static final int ROUNDS = 30; - private final List> RANDOM_COMMAND_GENERATORS = unmodifiableList(Arrays.asList( + private final List> RANDOM_COMMAND_GENERATORS = unmodifiableList( + Arrays.> asList( () -> new AllocateReplicaAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), randomAsciiOfLengthBetween(2, 10)), () -> new AllocateEmptyPrimaryAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index f41fc698fc1..7d36ae14739 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.test.ESSingleNodeTestCase; import java.util.ArrayList; +import 
java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -50,7 +51,7 @@ import static org.hamcrest.CoreMatchers.instanceOf; public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { public void testIndexTemplateInvalidNumberOfShards() { PutRequest request = new PutRequest("test", "test_shards"); - request.template("test_shards*"); + request.patterns(Collections.singletonList("test_shards*")); Map map = new HashMap<>(); map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0"); @@ -69,7 +70,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { public void testIndexTemplateValidationAccumulatesValidationErrors() { PutRequest request = new PutRequest("test", "putTemplate shards"); - request.template("_test_shards*"); + request.patterns(Collections.singletonList("_test_shards*")); Map map = new HashMap<>(); map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0"); @@ -86,18 +87,18 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { public void testIndexTemplateWithAliasNameEqualToTemplatePattern() { PutRequest request = new PutRequest("api", "foobar_template"); - request.template("foobar"); + request.patterns(Arrays.asList("foo", "foobar")); request.aliases(Collections.singleton(new Alias("foobar"))); List errors = putTemplate(request); assertThat(errors.size(), equalTo(1)); assertThat(errors.get(0), instanceOf(IllegalArgumentException.class)); - assertThat(errors.get(0).getMessage(), equalTo("Alias [foobar] cannot be the same as the template pattern [foobar]")); + assertThat(errors.get(0).getMessage(), equalTo("Alias [foobar] cannot be the same as any pattern in [foo, foobar]")); } public void testIndexTemplateWithValidateEmptyMapping() throws Exception { PutRequest request = new PutRequest("api", "validate_template"); - request.template("validate_template"); + request.patterns(Collections.singletonList("validate_template")); request.putMapping("type1", 
"{}"); List errors = putTemplateDetail(request); @@ -108,7 +109,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { public void testIndexTemplateWithValidateMapping() throws Exception { PutRequest request = new PutRequest("api", "validate_template"); - request.template("te*"); + request.patterns(Collections.singletonList("te*")); request.putMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field2").field("type", "string").field("analyzer", "custom_1").endObject() .endObject().endObject().endObject().string()); @@ -121,7 +122,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { public void testBrokenMapping() throws Exception { PutRequest request = new PutRequest("api", "broken_mapping"); - request.template("te*"); + request.patterns(Collections.singletonList("te*")); request.putMapping("type1", "abcde"); List errors = putTemplateDetail(request); @@ -132,7 +133,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { public void testBlankMapping() throws Exception { PutRequest request = new PutRequest("api", "blank_mapping"); - request.template("te*"); + request.patterns(Collections.singletonList("te*")); request.putMapping("type1", "{}"); List errors = putTemplateDetail(request); @@ -144,7 +145,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { public void testAliasInvalidFilterInvalidJson() throws Exception { //invalid json: put index template fails PutRequest request = new PutRequest("api", "blank_mapping"); - request.template("te*"); + request.patterns(Collections.singletonList("te*")); request.putMapping("type1", "{}"); Set aliases = new HashSet<>(); aliases.add(new Alias("invalid_alias").filter("abcde")); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java 
b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java new file mode 100644 index 00000000000..076245ad76d --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.admin.indices.template.put; + +import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Base64; +import java.util.Collections; + +public class PutIndexTemplateRequestTests extends ESTestCase { + + // bwc for #21009 + public void testPutIndexTemplateRequest510() throws IOException { + PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("test"); + putRequest.patterns(Collections.singletonList("test*")); + putRequest.order(5); + + PutIndexTemplateRequest multiPatternRequest = new PutIndexTemplateRequest("test"); + multiPatternRequest.patterns(Arrays.asList("test*", "*test2", "*test3*")); + multiPatternRequest.order(5); + + // These bytes were retrieved by Base64 encoding the result of the above with 5_0_0 code. + // Note: Instead of a list for the template, in 5_0_0 the element was provided as a string. + String putRequestBytes = "ADwDAAR0ZXN0BXRlc3QqAAAABQAAAAAAAA=="; + BytesArray bytes = new BytesArray(Base64.getDecoder().decode(putRequestBytes)); + + try (StreamInput in = bytes.streamInput()) { + in.setVersion(Version.V_5_0_0); + PutIndexTemplateRequest readRequest = new PutIndexTemplateRequest(); + readRequest.readFrom(in); + assertEquals(putRequest.patterns(), readRequest.patterns()); + assertEquals(putRequest.order(), readRequest.order()); + + BytesStreamOutput output = new BytesStreamOutput(); + output.setVersion(Version.V_5_0_0); + readRequest.writeTo(output); + assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef()); + + // test that multi templates are reverse-compatible. + // for the bwc case, if multiple patterns, use only the first pattern seen. 
+ output.reset(); + multiPatternRequest.writeTo(output); + assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef()); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 57aa0cbb9a4..a4a5f6f5ba6 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -89,7 +89,7 @@ public class BulkRequestTests extends ESTestCase { assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1")); Script script = ((UpdateRequest) bulkRequest.requests().get(1)).script(); assertThat(script, notNullValue()); - assertThat(script.getScript(), equalTo("counter += param1")); + assertThat(script.getIdOrCode(), equalTo("counter += param1")); assertThat(script.getLang(), equalTo("javascript")); Map scriptParams = script.getParams(); assertThat(scriptParams, notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index 28b52eca316..82eee3554e8 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -122,7 +122,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { assertThat(bulkItemResponse.getIndex(), equalTo("test")); } - final Script script = new Script("ctx._source.field += 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + final Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap()); bulkResponse = client().prepareBulk() .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("1").setScript(script)) @@ -259,11 +259,14 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { bulkResponse = 
client().prepareBulk() .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFields("field") - .setScript(new Script("throw script exception on unknown var", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .setScript(new Script( + ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap()))) .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFields("field") - .setScript(new Script("ctx._source.field += 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .setScript(new Script( + ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap()))) .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFields("field") - .setScript(new Script("throw script exception on unknown var", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .setScript(new Script( + ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap()))) .execute().actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(true)); @@ -291,7 +294,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { numDocs++; // this test needs an even num of docs } - final Script script = new Script("ctx._source.counter += 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + final Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.counter += 1", Collections.emptyMap()); BulkRequestBuilder builder = client().prepareBulk(); for (int i = 0; i < numDocs; i++) { @@ -380,7 +383,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { builder = client().prepareBulk(); for (int i = 0; i < numDocs; i++) { builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)) - .setScript(new Script("ctx.op = \"none\"", ScriptType.INLINE, CustomScriptPlugin.NAME, null))); + .setScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, 
"ctx.op = \"none\"", Collections.emptyMap()))); } response = builder.execute().actionGet(); assertThat(response.buildFailureMessage(), response.hasFailures(), equalTo(false)); @@ -396,7 +399,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { builder = client().prepareBulk(); for (int i = 0; i < numDocs; i++) { builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)) - .setScript(new Script("ctx.op = \"delete\"", ScriptType.INLINE, CustomScriptPlugin.NAME, null))); + .setScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op = \"delete\"", Collections.emptyMap()))); } response = builder.execute().actionGet(); assertThat(response.hasFailures(), equalTo(false)); diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 2778a9dbf47..253f0146634 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.search; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; -import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.BigArrays; @@ -57,7 +56,7 @@ public class SearchPhaseControllerTests extends ESTestCase { @Before public void setup() { - searchPhaseController = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null, null); + searchPhaseController = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); } public void testSort() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java 
b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index bbf1d2f1942..228c68a1760 100644 --- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -222,7 +222,7 @@ public class TransportActionFilterChainTests extends ESTestCase { RequestTestFilter testFilter = new RequestTestFilter(randomInt(), new RequestCallback() { @Override - public , Response extends ActionResponse> void execute(Task task, String action, Request request, + public void execute(Task task, String action, Request request, ActionListener listener, ActionFilterChain actionFilterChain) { for (int i = 0; i <= additionalContinueCount; i++) { actionFilterChain.proceed(task, action, request, listener); @@ -349,7 +349,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } @Override - public , Response extends ActionResponse> void apply(Task task, String action, Request request, + public void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { this.runs.incrementAndGet(); this.lastActionName = action; @@ -382,7 +382,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } @Override - public , Response extends ActionResponse> void apply(Task task, String action, Request request, + public void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { chain.proceed(task, action, request, listener); } @@ -400,7 +400,7 @@ public class TransportActionFilterChainTests extends ESTestCase { private static enum RequestOperation implements RequestCallback { CONTINUE_PROCESSING { @Override - public , Response extends ActionResponse> void execute(Task task, String action, Request request, + public void execute(Task task, String action, Request request, ActionListener listener, ActionFilterChain actionFilterChain) { 
actionFilterChain.proceed(task, action, request, listener); } @@ -408,14 +408,14 @@ public class TransportActionFilterChainTests extends ESTestCase { LISTENER_RESPONSE { @Override @SuppressWarnings("unchecked") // Safe because its all we test with - public , Response extends ActionResponse> void execute(Task task, String action, Request request, + public void execute(Task task, String action, Request request, ActionListener listener, ActionFilterChain actionFilterChain) { ((ActionListener) listener).onResponse(new TestResponse()); } }, LISTENER_FAILURE { @Override - public , Response extends ActionResponse> void execute(Task task, String action, Request request, + public void execute(Task task, String action, Request request, ActionListener listener, ActionFilterChain actionFilterChain) { listener.onFailure(new ElasticsearchTimeoutException("")); } @@ -448,7 +448,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } private interface RequestCallback { - , Response extends ActionResponse> void execute(Task task, String action, Request request, + void execute(Task task, String action, Request request, ActionListener listener, ActionFilterChain actionFilterChain); } @@ -457,7 +457,7 @@ public class TransportActionFilterChainTests extends ESTestCase { ActionFilterChain chain); } - public static class TestRequest extends ActionRequest { + public static class TestRequest extends ActionRequest { @Override public ActionRequestValidationException validate() { return null; diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 73085276628..96ba5729cb8 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -27,6 +27,7 @@ import 
org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.FaultDetection; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.NetworkDisruption.NetworkDisconnect; import org.elasticsearch.test.disruption.NetworkDisruption.TwoPartitions; @@ -45,11 +46,6 @@ import static org.hamcrest.Matchers.equalTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class IndexingMasterFailoverIT extends ESIntegTestCase { - @Override - protected boolean addMockZenPings() { - return false; - } - @Override protected Collection> nodePlugins() { final HashSet> classes = new HashSet<>(super.nodePlugins()); @@ -57,6 +53,12 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase { return classes; } + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false).build(); + } + /** * Indexing operations which entail mapping changes require a blocking request to the master node to update the mapping. * If the master node is being disrupted or if it cannot commit cluster state changes, it needs to retry within timeout limits. 
diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index 97da0aa8522..10e9d9d3f3d 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -106,12 +106,17 @@ public class TransportNodesActionTests extends ESTestCase { public void testNewResponse() { TestTransportNodesAction action = getTestTransportNodesAction(); TestNodesRequest request = new TestNodesRequest(); - List expectedNodeResponses = mockList(TestNodeResponse.class, randomIntBetween(0, 2)); + List expectedNodeResponses = mockList(TestNodeResponse::new, randomIntBetween(0, 2)); expectedNodeResponses.add(new TestNodeResponse()); List nodeResponses = new ArrayList<>(expectedNodeResponses); // This should be ignored: nodeResponses.add(new OtherNodeResponse()); - List failures = mockList(FailedNodeException.class, randomIntBetween(0, 2)); + List failures = mockList( + () -> new FailedNodeException( + randomAsciiOfLength(8), + randomAsciiOfLength(8), + new IllegalStateException(randomAsciiOfLength(8))), + randomIntBetween(0, 2)); List allResponses = new ArrayList<>(expectedNodeResponses); allResponses.addAll(failures); @@ -141,10 +146,10 @@ public class TransportNodesActionTests extends ESTestCase { assertEquals(clusterService.state().nodes().getDataNodes().size(), capturedRequests.size()); } - private List mockList(Class clazz, int size) { + private List mockList(Supplier supplier, int size) { List failures = new ArrayList<>(size); for (int i = 0; i < size; ++i) { - failures.add(mock(clazz)); + failures.add(supplier.get()); } return failures; } diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 
942849bd171..3b27bbff9ce 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -65,11 +65,11 @@ public class UpdateRequestTests extends ESTestCase { .endObject()); Script script = request.script(); assertThat(script, notNullValue()); - assertThat(script.getScript(), equalTo("script1")); + assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getType(), equalTo(ScriptType.INLINE)); assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); Map params = script.getParams(); - assertThat(params, nullValue()); + assertThat(params, equalTo(Collections.emptyMap())); // simple verbose script request.fromXContent(XContentFactory.jsonBuilder().startObject() @@ -77,11 +77,11 @@ public class UpdateRequestTests extends ESTestCase { .endObject()); script = request.script(); assertThat(script, notNullValue()); - assertThat(script.getScript(), equalTo("script1")); + assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getType(), equalTo(ScriptType.INLINE)); assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); params = script.getParams(); - assertThat(params, nullValue()); + assertThat(params, equalTo(Collections.emptyMap())); // script with params request = new UpdateRequest("test", "type", "1"); @@ -94,7 +94,7 @@ public class UpdateRequestTests extends ESTestCase { .endObject().endObject()); script = request.script(); assertThat(script, notNullValue()); - assertThat(script.getScript(), equalTo("script1")); + assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getType(), equalTo(ScriptType.INLINE)); assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); params = script.getParams(); @@ -108,7 +108,7 @@ public class UpdateRequestTests extends ESTestCase { .field("inline", "script1").endObject().endObject()); script = request.script(); assertThat(script, notNullValue()); - 
assertThat(script.getScript(), equalTo("script1")); + assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getType(), equalTo(ScriptType.INLINE)); assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); params = script.getParams(); @@ -133,7 +133,7 @@ public class UpdateRequestTests extends ESTestCase { .endObject().endObject()); script = request.script(); assertThat(script, notNullValue()); - assertThat(script.getScript(), equalTo("script1")); + assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getType(), equalTo(ScriptType.INLINE)); assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); params = script.getParams(); @@ -160,7 +160,7 @@ public class UpdateRequestTests extends ESTestCase { .endObject().endObject()); script = request.script(); assertThat(script, notNullValue()); - assertThat(script.getScript(), equalTo("script1")); + assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getType(), equalTo(ScriptType.INLINE)); assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); params = script.getParams(); @@ -325,7 +325,7 @@ public class UpdateRequestTests extends ESTestCase { { UpdateRequest updateRequest = new UpdateRequest("test", "type1", "2") .upsert(indexRequest) - .script(new Script("ctx._source.update_timestamp = ctx._now", ScriptType.INLINE, "mock", Collections.emptyMap())) + .script(new Script(ScriptType.INLINE, "mock", "ctx._source.update_timestamp = ctx._now", Collections.emptyMap())) .scriptedUpsert(true); long nowInMillis = randomPositiveLong(); // We simulate that the document is not existing yet @@ -339,7 +339,7 @@ public class UpdateRequestTests extends ESTestCase { { UpdateRequest updateRequest = new UpdateRequest("test", "type1", "2") .upsert(indexRequest) - .script(new Script("ctx._timestamp = ctx._now", ScriptType.INLINE, "mock", Collections.emptyMap())) + .script(new Script(ScriptType.INLINE, "mock", "ctx._timestamp = ctx._now", 
Collections.emptyMap())) .scriptedUpsert(true); long nowInMillis = randomPositiveLong(); // We simulate that the document is not existing yet diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java index b60e6f01268..477a40b435a 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java @@ -592,12 +592,15 @@ public class BootstrapCheckTests extends ESTestCase { BootstrapCheck.check(true, Collections.singletonList(nonOracleCheck), "testG1GCCheck"); final BootstrapCheck.G1GCCheck nonJava8Check = new BootstrapCheck.G1GCCheck() { + @Override boolean isJava8() { return false; } + }; - // if not java 8, nothing should happen + + // if not Java 8, nothing should happen BootstrapCheck.check(true, Collections.singletonList(nonJava8Check), "testG1GCCheck"); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java index eb6648cad02..fd78fc147fa 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java @@ -23,11 +23,11 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.service.ClusterStateStatus; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESBackcompatTestCase; import org.elasticsearch.transport.MockTransportClient; @@ -50,7 +50,6 @@ public class ClusterStateBackwardsCompatIT extends ESBackcompatTestCase { tc.addTransportAddress(n.getNode().getAddress()); ClusterStateResponse response = tc.admin().cluster().prepareState().execute().actionGet(); - assertThat(response.getState().status(), equalTo(ClusterState.ClusterStateStatus.UNKNOWN)); assertNotNull(response.getClusterName()); assertTrue(response.getState().getMetaData().hasIndex("test")); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 3009f7d5c3b..0ad8c2dfff1 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.Version; +import org.elasticsearch.VersionTests; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.segments.IndexSegments; @@ -84,6 +85,7 @@ import java.util.TreeSet; import static org.elasticsearch.test.OldIndexUtils.assertUpgradeWorks; import static org.elasticsearch.test.OldIndexUtils.getIndexDir; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.greaterThanOrEqualTo; // needs at least 2 nodes since it bumps replicas to 1 @@ -244,6 +246,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { 
assertUpgradeWorks(client(), indexName, version); assertDeleteByQueryWorked(indexName, version); assertPositionIncrementGapDefaults(indexName, version); + assertAliasWithBadName(indexName, version); unloadIndex(indexName); } @@ -429,6 +432,31 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { } } + private static final Version VERSION_5_1_0_UNRELEASED = Version.fromString("5.1.0"); + + public void testUnreleasedVersion() { + VersionTests.assertUnknownVersion(VERSION_5_1_0_UNRELEASED); + } + + /** + * Search on an alias that contains illegal characters that would prevent it from being created after 5.1.0. It should still be + * search-able though. + */ + void assertAliasWithBadName(String indexName, Version version) throws Exception { + if (version.onOrAfter(VERSION_5_1_0_UNRELEASED)) { + return; + } + // We can read from the alias just like we can read from the index. + String aliasName = "#" + indexName; + long totalDocs = client().prepareSearch(indexName).setSize(0).get().getHits().totalHits(); + assertHitCount(client().prepareSearch(aliasName).setSize(0).get(), totalDocs); + assertThat(totalDocs, greaterThanOrEqualTo(2000L)); + + // We can remove the alias. 
+ assertAcked(client().admin().indices().prepareAliases().removeAlias(indexName, aliasName).get()); + assertFalse(client().admin().indices().prepareAliasesExist(aliasName).get().exists()); + } + private Path getNodeDir(String indexFile) throws IOException { Path unzipDir = createTempDir(); Path unzipDataDir = unzipDir.resolve("data"); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 6ed3d64e46e..df49e77a515 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -46,6 +46,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.SortedSet; @@ -224,7 +225,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { logger.info("--> check templates"); IndexTemplateMetaData template = clusterState.getMetaData().templates().get("template_" + version.toLowerCase(Locale.ROOT)); assertThat(template, notNullValue()); - assertThat(template.template(), equalTo("te*")); + assertThat(template.patterns(), equalTo(Collections.singletonList("te*"))); assertThat(template.settings().getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1), equalTo(1)); assertThat(template.mappings().size(), equalTo(1)); assertThat(template.mappings().get("type1").string(), equalTo("{\"type1\":{\"_source\":{\"enabled\":false}}}")); diff --git a/core/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java b/core/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java index 360137b8904..35406ef1153 100644 --- a/core/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java +++ 
b/core/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java @@ -38,7 +38,7 @@ public class ParentTaskAssigningClientTests extends ESTestCase { // This mock will do nothing but verify that parentTaskId is set on all requests sent to it. NoOpClient mock = new NoOpClient(getTestName()) { @Override - protected < Request extends ActionRequest, + protected < Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder > void doExecute( Action action, Request request, diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index dbb066dcb1b..0772e87d900 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -19,6 +19,9 @@ package org.elasticsearch.client.transport; +import java.io.IOException; +import java.util.Arrays; + import org.elasticsearch.Version; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -32,14 +35,11 @@ import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.discovery.MockZenPing; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.transport.MockTcpTransportPlugin; import org.elasticsearch.transport.MockTransportClient; import org.elasticsearch.transport.TransportService; -import java.io.IOException; -import java.util.Arrays; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; @@ -66,7 +66,7 @@ public class TransportClientIT extends ESIntegTestCase { .put(NetworkModule.HTTP_ENABLED.getKey(), false) 
.put(Node.NODE_DATA_SETTING.getKey(), false) .put("cluster.name", "foobar") - .build(), Arrays.asList(MockTcpTransportPlugin.class, MockZenPing.TestPlugin.class)).start()) { + .build(), Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class)).start()) { TransportAddress transportAddress = node.injector().getInstance(TransportService.class).boundAddress().publishAddress(); client.addTransportAddress(transportAddress); // since we force transport clients there has to be one node started that we connect to. diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 3cafff08a07..111a3b1fe10 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -93,7 +93,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { } @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { + protected boolean apply(String action, ActionRequest request, ActionListener listener) { if (blockedActions.contains(action)) { throw new ElasticsearchException("force exception on [" + action + "]"); } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index c77d7c10c96..9d44dbbca38 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.snapshots.SnapshotId; import 
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -56,6 +57,7 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.Collections; import java.util.List; +import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -238,13 +240,19 @@ public class ClusterStateDiffIT extends ESIntegTestCase { for (int i = 0; i < shardCount; i++) { IndexShardRoutingTable.Builder indexShard = new IndexShardRoutingTable.Builder(new ShardId(index, "_na_", i)); int replicaCount = randomIntBetween(1, 10); + Set availableNodeIds = Sets.newHashSet(nodeIds); for (int j = 0; j < replicaCount; j++) { UnassignedInfo unassignedInfo = null; if (randomInt(5) == 1) { unassignedInfo = new UnassignedInfo(randomReason(), randomAsciiOfLength(10)); } + if (availableNodeIds.isEmpty()) { + break; + } + String nodeId = randomFrom(availableNodeIds); + availableNodeIds.remove(nodeId); indexShard.addShard( - TestShardRouting.newShardRouting(index, i, randomFrom(nodeIds), null, j == 0, + TestShardRouting.newShardRouting(index, i, nodeId, null, j == 0, ShardRoutingState.fromValue((byte) randomIntBetween(2, 3)), unassignedInfo)); } builder.addIndexShard(indexShard.build()); @@ -258,8 +266,20 @@ public class ClusterStateDiffIT extends ESIntegTestCase { private IndexRoutingTable randomChangeToIndexRoutingTable(IndexRoutingTable original, String[] nodes) { IndexRoutingTable.Builder builder = IndexRoutingTable.builder(original.getIndex()); for (ObjectCursor indexShardRoutingTable : original.shards().values()) { + Set availableNodes = Sets.newHashSet(nodes); for (ShardRouting shardRouting : indexShardRoutingTable.value.shards()) { - final ShardRouting updatedShardRouting = randomChange(shardRouting, nodes); + availableNodes.remove(shardRouting.currentNodeId()); + if (shardRouting.relocating()) { + availableNodes.remove(shardRouting.relocatingNodeId()); + } + } + + for (ShardRouting shardRouting : 
indexShardRoutingTable.value.shards()) { + final ShardRouting updatedShardRouting = randomChange(shardRouting, availableNodes); + availableNodes.remove(updatedShardRouting.currentNodeId()); + if (shardRouting.relocating()) { + availableNodes.remove(updatedShardRouting.relocatingNodeId()); + } builder.addShard(updatedShardRouting); } } @@ -553,7 +573,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase { public IndexTemplateMetaData randomCreate(String name) { IndexTemplateMetaData.Builder builder = IndexTemplateMetaData.builder(name); builder.order(randomInt(1000)) - .template(randomName("temp")) + .patterns(Collections.singletonList(randomName("temp"))) .settings(randomSettings(Settings.EMPTY)); int aliasCount = randomIntBetween(0, 10); for (int i = 0; i < aliasCount; i++) { diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index fd68e484062..3e58291d4ad 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -36,6 +36,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.NetworkDisruption.NetworkDelay; import org.elasticsearch.test.disruption.NetworkDisruption.TwoPartitions; @@ -74,8 +75,9 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { } @Override - protected boolean addMockZenPings() { - return false; + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false).build(); } public void 
testSimpleMinimumMasterNodes() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index fac8d5f7b63..f73043ce4e4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -40,6 +40,8 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import java.util.Collections; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; @@ -122,12 +124,14 @@ public class NoMasterNodeIT extends ESIntegTestCase { checkWriteAction( false, timeout, client().prepareUpdate("test", "type1", "1") - .setScript(new Script("test script", ScriptType.INLINE, null, null)).setTimeout(timeout)); + .setScript(new Script( + ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", Collections.emptyMap())).setTimeout(timeout)); checkWriteAction( autoCreateIndex, timeout, client().prepareUpdate("no_index", "type1", "1") - .setScript(new Script("test script", ScriptType.INLINE, null, null)).setTimeout(timeout)); + .setScript(new Script( + ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", Collections.emptyMap())).setTimeout(timeout)); checkWriteAction(false, timeout, diff --git a/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java b/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java index c396350f9de..1944ed6e3bc 100644 --- a/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java @@ -40,6 +40,8 @@ import org.elasticsearch.test.ESIntegTestCase; import 
org.elasticsearch.test.hamcrest.CollectionAssertions; import org.junit.Before; +import java.util.Collections; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateExists; import static org.hamcrest.Matchers.equalTo; @@ -92,7 +94,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase { public void testIndexTemplates() throws Exception { client().admin().indices().preparePutTemplate("foo_template") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() @@ -101,7 +103,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase { .get(); client().admin().indices().preparePutTemplate("fuu_template") - .setTemplate("test*") + .setPatterns(Collections.singletonList("test*")) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field2").field("type", "text").field("store", "no").endObject() diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index eb5c88d7e83..feaeee703b6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -353,7 +353,7 @@ public class ClusterStateHealthTests extends ESTestCase { final int numberOfReplicas, final boolean withPrimaryAllocationFailures) { // generate random node ids - final List nodeIds = new ArrayList<>(); + final Set nodeIds = new HashSet<>(); final int numNodes = randomIntBetween(numberOfReplicas + 1, 10); for (int i = 0; i < numNodes; i++) { 
nodeIds.add(randomAsciiOfLength(8)); @@ -372,7 +372,7 @@ public class ClusterStateHealthTests extends ESTestCase { for (final ShardRouting shardRouting : shardRoutingTable.getShards()) { if (shardRouting.primary()) { newIndexRoutingTable.addShard( - shardRouting.initialize(nodeIds.get(randomIntBetween(0, numNodes - 1)), null, shardRouting.getExpectedShardSize()) + shardRouting.initialize(randomFrom(nodeIds), null, shardRouting.getExpectedShardSize()) ); } else { newIndexRoutingTable.addShard(shardRouting); @@ -460,17 +460,15 @@ public class ClusterStateHealthTests extends ESTestCase { newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); for (final ObjectCursor shardEntry : indexRoutingTable.getShards().values()) { final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId(); + Set allocatedNodes = new HashSet<>(); + allocatedNodes.add(primaryNodeId); for (final ShardRouting shardRouting : shardRoutingTable.getShards()) { if (shardRouting.primary() == false) { // give the replica a different node id than the primary - final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId(); - String replicaNodeId; - do { - replicaNodeId = nodeIds.get(randomIntBetween(0, numNodes - 1)); - } while (primaryNodeId.equals(replicaNodeId)); - newIndexRoutingTable.addShard( - shardRouting.initialize(replicaNodeId, null, shardRouting.getExpectedShardSize()) - ); + String replicaNodeId = randomFrom(Sets.difference(nodeIds, allocatedNodes)); + newIndexRoutingTable.addShard(shardRouting.initialize(replicaNodeId, null, shardRouting.getExpectedShardSize())); + allocatedNodes.add(replicaNodeId); } else { newIndexRoutingTable.addShard(shardRouting); } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/AliasValidatorTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/AliasValidatorTests.java new file mode 100644 index 
00000000000..05bd9eeab8c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/AliasValidatorTests.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.InvalidAliasNameException; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.startsWith; + +public class AliasValidatorTests extends ESTestCase { + public void testValidatesAliasNames() { + AliasValidator validator = new AliasValidator(Settings.EMPTY); + Exception e = expectThrows(InvalidAliasNameException.class, () -> validator.validateAliasStandalone(".", null)); + assertEquals("Invalid alias name [.]: must not be '.' or '..'", e.getMessage()); + e = expectThrows(InvalidAliasNameException.class, () -> validator.validateAliasStandalone("..", null)); + assertEquals("Invalid alias name [..]: must not be '.' 
or '..'", e.getMessage()); + e = expectThrows(InvalidAliasNameException.class, () -> validator.validateAliasStandalone("_cat", null)); + assertEquals("Invalid alias name [_cat]: must not start with '_', '-', or '+'", e.getMessage()); + e = expectThrows(InvalidAliasNameException.class, () -> validator.validateAliasStandalone("-cat", null)); + assertEquals("Invalid alias name [-cat]: must not start with '_', '-', or '+'", e.getMessage()); + e = expectThrows(InvalidAliasNameException.class, () -> validator.validateAliasStandalone("+cat", null)); + assertEquals("Invalid alias name [+cat]: must not start with '_', '-', or '+'", e.getMessage()); + e = expectThrows(InvalidAliasNameException.class, () -> validator.validateAliasStandalone("c*t", null)); + assertThat(e.getMessage(), startsWith("Invalid alias name [c*t]: must not contain the following characters ")); + + // Doesn't throw an exception because we allow upper case alias names + validator.validateAliasStandalone("CAT", null); + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java index 5fef33be388..faf34bd3d05 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java new file mode 100644 index 00000000000..5265a7548a6 --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Base64; +import java.util.Collections; + +import static org.elasticsearch.cluster.metadata.AliasMetaData.newAliasMetaDataBuilder; + +public class IndexTemplateMetaDataTests extends ESTestCase { + + // bwc for #21009 + public void testIndexTemplateMetaData510() throws IOException { + IndexTemplateMetaData metaData = IndexTemplateMetaData.builder("foo") + .patterns(Collections.singletonList("bar")) + .order(1) + .settings(Settings.builder() + .put("setting1", "value1") + .put("setting2", "value2")) + .putAlias(newAliasMetaDataBuilder("alias-bar1")).build(); + + IndexTemplateMetaData multiMetaData = IndexTemplateMetaData.builder("foo") + 
.patterns(Arrays.asList("bar", "foo")) + .order(1) + .settings(Settings.builder() + .put("setting1", "value1") + .put("setting2", "value2")) + .putAlias(newAliasMetaDataBuilder("alias-bar1")).build(); + + // These bytes were retrieved by Base64 encoding the result of the above with 5_0_0 code + String templateBytes = "A2ZvbwAAAAEDYmFyAghzZXR0aW5nMQEGdmFsdWUxCHNldHRpbmcyAQZ2YWx1ZTIAAQphbGlhcy1iYXIxAAAAAAA="; + BytesArray bytes = new BytesArray(Base64.getDecoder().decode(templateBytes)); + + try (StreamInput in = bytes.streamInput()) { + in.setVersion(Version.V_5_0_0); + IndexTemplateMetaData readMetaData = IndexTemplateMetaData.Builder.readFrom(in); + assertEquals(0, in.available()); + assertEquals(metaData.getName(), readMetaData.getName()); + assertEquals(metaData.getPatterns(), readMetaData.getPatterns()); + assertTrue(metaData.aliases().containsKey("alias-bar1")); + assertEquals(1, metaData.aliases().size()); + + BytesStreamOutput output = new BytesStreamOutput(); + output.setVersion(Version.V_5_0_0); + readMetaData.writeTo(output); + assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef()); + + // test that multi templates are reverse-compatible. + // for the bwc case, if multiple patterns, use only the first pattern seen. 
+ output.reset(); + multiMetaData.writeTo(output); + assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef()); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java index c6ce30e2a52..1dfd3fd33e6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java @@ -18,11 +18,17 @@ */ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.util.Collections; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -63,4 +69,34 @@ public class MetaDataMappingServiceTests extends ESSingleNodeTestCase { assertThat(documentMapper.parentFieldMapper().active(), is(true)); } + public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Exception { + final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type")); + final CompressedXContent currentMapping = indexService.mapperService().documentMapper("type").mappingSource(); + + final MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class); + final ClusterService clusterService = getInstanceFromNode(ClusterService.class); + // TODO - it will be nice to get a random mapping generator + final PutMappingClusterStateUpdateRequest request = new 
PutMappingClusterStateUpdateRequest().type("type"); + request.source("{ \"properties\" { \"field\": { \"type\": \"string\" }}}"); + mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request)); + assertThat(indexService.mapperService().documentMapper("type").mappingSource(), equalTo(currentMapping)); + } + + public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception { + createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type")); + + final MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class); + final ClusterService clusterService = getInstanceFromNode(ClusterService.class); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type"); + request.source("{ \"properties\" { \"field\": { \"type\": \"string\" }}}"); + ClusterState result = mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request)) + .resultingState; + + assertFalse(result != clusterService.state()); + + ClusterState result2 = mappingService.putMappingExecutor.execute(result, Collections.singletonList(request)) + .resultingState; + + assertSame(result, result2); + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java index 3d3da02822a..47f420e1bea 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Collections; import static org.elasticsearch.cluster.metadata.AliasMetaData.newAliasMetaDataBuilder; import static org.hamcrest.Matchers.equalTo; @@ -33,6 +34,7 @@ 
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; public class ToAndFromJsonMetaDataTests extends ESTestCase { + public void testSimpleJsonFromAndTo() throws IOException { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test1") @@ -113,7 +115,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase { .putAlias(newAliasMetaDataBuilder("alias2")) .putAlias(newAliasMetaDataBuilder("alias4").filter(ALIAS_FILTER2))) .put(IndexTemplateMetaData.builder("foo") - .template("bar") + .patterns(Collections.singletonList("bar")) .order(1) .settings(Settings.builder() .put("setting1", "value1") @@ -134,7 +136,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase { .putAlias(newAliasMetaDataBuilder("alias2")) .putAlias(newAliasMetaDataBuilder("alias4").filter(ALIAS_FILTER2))) .put(IndexTemplateMetaData.builder("foo") - .template("bar") + .patterns(Collections.singletonList("bar")) .order(1) .settings(Settings.builder() .put("setting1", "value1") @@ -292,7 +294,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase { // templates assertThat(parsedMetaData.templates().get("foo").name(), is("foo")); - assertThat(parsedMetaData.templates().get("foo").template(), is("bar")); + assertThat(parsedMetaData.templates().get("foo").patterns(), is(Collections.singletonList("bar"))); assertThat(parsedMetaData.templates().get("foo").settings().get("index.setting1"), is("value1")); assertThat(parsedMetaData.templates().get("foo").settings().getByPrefix("index.").get("setting2"), is("value2")); assertThat(parsedMetaData.templates().get("foo").aliases().size(), equalTo(3)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index 0d284a1e47e..93326e54db9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.NetworkDisruption.NetworkDisconnect; import org.elasticsearch.test.disruption.NetworkDisruption.TwoPartitions; @@ -62,8 +63,9 @@ public class PrimaryAllocationIT extends ESIntegTestCase { } @Override - protected boolean addMockZenPings() { - return false; + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false).build(); } private void createStaleReplicaScenario() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java index c3064c7fa9d..69773e99921 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java @@ -19,6 +19,8 @@ package org.elasticsearch.cluster.routing; +import java.util.Set; + import static org.elasticsearch.test.ESTestCase.randomAsciiOfLength; import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomInt; @@ -31,7 +33,7 @@ public final class RandomShardRoutingMutator { } - public static ShardRouting randomChange(ShardRouting shardRouting, String[] nodes) { + public static ShardRouting randomChange(ShardRouting shardRouting, Set nodes) { switch (randomInt(2)) { case 0: if (shardRouting.unassigned() == false && shardRouting.primary() == false) { @@ -42,7 +44,7 @@ public 
final class RandomShardRoutingMutator { } break; case 1: - if (shardRouting.unassigned()) { + if (shardRouting.unassigned() && nodes.isEmpty() == false) { shardRouting = shardRouting.initialize(randomFrom(nodes), null, -1); } break; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index 6ed42ee45aa..e26fece7c6d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -28,9 +28,12 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.node.DiscoveryNodes.Builder; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; import org.junit.Before; +import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; @@ -328,6 +331,19 @@ public class RoutingTableTests extends ESAllocationTestCase { expectThrows(IllegalStateException.class, () -> indexRoutingTable.validate(metaData4)); } + public void testDistinctNodes() { + ShardId shardId = new ShardId(new Index("index", "uuid"), 0); + ShardRouting routing1 = TestShardRouting.newShardRouting(shardId, "node1", randomBoolean(), ShardRoutingState.STARTED); + ShardRouting routing2 = TestShardRouting.newShardRouting(shardId, "node2", randomBoolean(), ShardRoutingState.STARTED); + ShardRouting routing3 = TestShardRouting.newShardRouting(shardId, "node1", randomBoolean(), ShardRoutingState.STARTED); + ShardRouting routing4 = TestShardRouting.newShardRouting(shardId, "node3", "node2", randomBoolean(), ShardRoutingState.RELOCATING); + assertTrue(IndexShardRoutingTable.Builder.distinctNodes(Arrays.asList(routing1, routing2))); + 
assertFalse(IndexShardRoutingTable.Builder.distinctNodes(Arrays.asList(routing1, routing3))); + assertFalse(IndexShardRoutingTable.Builder.distinctNodes(Arrays.asList(routing1, routing2, routing3))); + assertTrue(IndexShardRoutingTable.Builder.distinctNodes(Arrays.asList(routing1, routing4))); + assertFalse(IndexShardRoutingTable.Builder.distinctNodes(Arrays.asList(routing2, routing4))); + } + /** reverse engineer the in sync aid based on the given indexRoutingTable **/ public static IndexMetaData updateActiveAllocations(IndexRoutingTable indexRoutingTable, IndexMetaData indexMetaData) { IndexMetaData.Builder imdBuilder = IndexMetaData.builder(indexMetaData); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java index 806e136bba3..1ae1620520e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java @@ -118,10 +118,8 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { ClusterState clusterState = rebalance.v1(); RebalanceDecision rebalanceDecision = rebalance.v2(); assertEquals(Type.YES, rebalanceDecision.getCanRebalanceDecision().type()); - assertEquals(Type.YES, rebalanceDecision.getFinalDecisionType()); assertNotNull(rebalanceDecision.getFinalExplanation()); assertEquals(clusterState.nodes().getSize() - 1, rebalanceDecision.getNodeDecisions().size()); - assertNotNull(rebalanceDecision.getAssignedNodeId()); } public void testRebalancingNotAllowedDueToCanAllocate() { diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java index 111b84f981a..3d345f24dbe 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java @@ -535,7 +535,7 @@ public class ClusterServiceIT extends ESIntegTestCase { // there should not be any master as the minimum number of required eligible masters is not met awaitBusy(() -> clusterService1.state().nodes().getMasterNode() == null && - clusterService1.state().status() == ClusterState.ClusterStateStatus.APPLIED); + clusterService1.clusterServiceState().getClusterStateStatus() == ClusterStateStatus.APPLIED); assertThat(testService1.master(), is(false)); // bring the node back up diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java index 1ea6853ee7c..9fd4fc18514 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.BaseFuture; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.test.ESTestCase; @@ -68,8 +69,10 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; @@ -303,6 +306,61 @@ public class ClusterServiceTests extends ESTestCase { assertTrue(published.get()); } + public void 
testBlockingCallInClusterStateTaskListenerFails() throws InterruptedException { + assumeTrue("assertions must be enabled for this test to work", BaseFuture.class.desiredAssertionStatus()); + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference assertionRef = new AtomicReference<>(); + + clusterService.submitStateUpdateTask( + "testBlockingCallInClusterStateTaskListenerFails", + new Object(), + ClusterStateTaskConfig.build(Priority.NORMAL), + new ClusterStateTaskExecutor() { + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterState newClusterState = ClusterState.builder(currentState).build(); + return BatchResult.builder().successes(tasks).build(newClusterState); + } + + @Override + public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + assertNotNull(assertionRef.get()); + } + }, + new ClusterStateTaskListener() { + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + BaseFuture future = new BaseFuture() {}; + try { + if (randomBoolean()) { + future.get(1L, TimeUnit.SECONDS); + } else { + future.get(); + } + } catch (Exception e) { + throw new RuntimeException(e); + } catch (AssertionError e) { + assertionRef.set(e); + latch.countDown(); + } + } + + @Override + public void onFailure(String source, Exception e) { + } + } + ); + + latch.await(); + assertNotNull(assertionRef.get()); + assertThat(assertionRef.get().getMessage(), containsString("not be the cluster state update thread. 
Reason: [Blocking operation]")); + } + public void testOneExecutorDontStarveAnother() throws InterruptedException { final List executionOrder = Collections.synchronizedList(new ArrayList<>()); final Semaphore allowProcessing = new Semaphore(0); diff --git a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java index 9e85db5f18e..159b8693b84 100644 --- a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java +++ b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java @@ -30,6 +30,8 @@ import org.hamcrest.TypeSafeMatcher; import org.joda.time.DateTime; import org.joda.time.DateTimeConstants; import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; import java.util.ArrayList; @@ -41,6 +43,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.startsWith; public class TimeZoneRoundingTests extends ESTestCase { @@ -511,6 +514,25 @@ public class TimeZoneRoundingTests extends ESTestCase { } } + /** + * Test that time zones are correctly parsed. There is a bug with + * Joda 2.9.4 (see https://github.com/JodaOrg/joda-time/issues/373) + */ + public void testsTimeZoneParsing() { + final DateTime expected = new DateTime(2016, 11, 10, 5, 37, 59, randomDateTimeZone()); + + // Formatter used to print and parse the sample date. 
+ // Printing the date works but parsing it back fails + // with Joda 2.9.4 + DateTimeFormatter formatter = DateTimeFormat.forPattern("YYYY-MM-dd'T'HH:mm:ss " + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'")); + + String dateTimeAsString = formatter.print(expected); + assertThat(dateTimeAsString, startsWith("2016-11-10T05:37:59 ")); + + DateTime parsedDateTime = formatter.parseDateTime(dateTimeAsString); + assertThat(parsedDateTime.getZone(), equalTo(expected.getZone())); + } + private static void assertInterval(long rounded, long nextRoundingValue, Rounding rounding, int minutes, DateTimeZone tz) { assertInterval(rounded, dateBetween(rounded, nextRoundingValue), nextRoundingValue, rounding, tz); diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index d402f09f07d..3f914f61d48 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -319,6 +319,9 @@ public class ThreadContextTests extends ESTestCase { // But we do inside of it withContext.run(); + + // but not after + assertNull(threadContext.getHeader("foo")); } } @@ -350,6 +353,177 @@ public class ThreadContextTests extends ESTestCase { } } + public void testPreservesThreadsOriginalContextOnRunException() throws IOException { + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + Runnable withContext; + + // create a abstract runnable, add headers and transient objects and verify in the methods + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("foo", "bar_transient"); + withContext = threadContext.preserveContext(new AbstractRunnable() { + + @Override + public void onAfter() { + assertEquals("bar", threadContext.getHeader("foo")); + 
assertEquals("bar_transient", threadContext.getTransient("foo")); + assertNotNull(threadContext.getTransient("failure")); + assertEquals("exception from doRun", ((RuntimeException)threadContext.getTransient("failure")).getMessage()); + assertFalse(threadContext.isDefaultContext()); + threadContext.putTransient("after", "after"); + } + + @Override + public void onFailure(Exception e) { + assertEquals("exception from doRun", e.getMessage()); + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals("bar_transient", threadContext.getTransient("foo")); + assertFalse(threadContext.isDefaultContext()); + threadContext.putTransient("failure", e); + } + + @Override + protected void doRun() throws Exception { + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals("bar_transient", threadContext.getTransient("foo")); + assertFalse(threadContext.isDefaultContext()); + throw new RuntimeException("exception from doRun"); + } + }); + } + + // We don't see the header outside of the runnable + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("foo")); + assertNull(threadContext.getTransient("failure")); + assertNull(threadContext.getTransient("after")); + assertTrue(threadContext.isDefaultContext()); + + // But we do inside of it + withContext.run(); + + // verify not seen after + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("foo")); + assertNull(threadContext.getTransient("failure")); + assertNull(threadContext.getTransient("after")); + assertTrue(threadContext.isDefaultContext()); + + // repeat with regular runnable + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("foo", "bar_transient"); + withContext = threadContext.preserveContext(() -> { + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals("bar_transient", threadContext.getTransient("foo")); + 
assertFalse(threadContext.isDefaultContext()); + threadContext.putTransient("run", true); + throw new RuntimeException("exception from run"); + }); + } + + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("foo")); + assertNull(threadContext.getTransient("run")); + assertTrue(threadContext.isDefaultContext()); + + final Runnable runnable = withContext; + RuntimeException e = expectThrows(RuntimeException.class, runnable::run); + assertEquals("exception from run", e.getMessage()); + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("foo")); + assertNull(threadContext.getTransient("run")); + assertTrue(threadContext.isDefaultContext()); + } + } + + public void testPreservesThreadsOriginalContextOnFailureException() throws IOException { + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + Runnable withContext; + + // a runnable that throws from onFailure + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("foo", "bar_transient"); + withContext = threadContext.preserveContext(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + throw new RuntimeException("from onFailure", e); + } + + @Override + protected void doRun() throws Exception { + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals("bar_transient", threadContext.getTransient("foo")); + assertFalse(threadContext.isDefaultContext()); + throw new RuntimeException("from doRun"); + } + }); + } + + // We don't see the header outside of the runnable + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("foo")); + assertTrue(threadContext.isDefaultContext()); + + // But we do inside of it + RuntimeException e = expectThrows(RuntimeException.class, withContext::run); + assertEquals("from onFailure", e.getMessage()); + assertEquals("from doRun", 
e.getCause().getMessage()); + + // but not after + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("foo")); + assertTrue(threadContext.isDefaultContext()); + } + } + + public void testPreservesThreadsOriginalContextOnAfterException() throws IOException { + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + Runnable withContext; + + // a runnable that throws from onAfter + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("foo", "bar_transient"); + withContext = threadContext.preserveContext(new AbstractRunnable() { + + @Override + public void onAfter() { + throw new RuntimeException("from onAfter"); + } + + @Override + public void onFailure(Exception e) { + throw new RuntimeException("from onFailure", e); + } + + @Override + protected void doRun() throws Exception { + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals("bar_transient", threadContext.getTransient("foo")); + assertFalse(threadContext.isDefaultContext()); + } + }); + } + + // We don't see the header outside of the runnable + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("foo")); + assertTrue(threadContext.isDefaultContext()); + + // But we do inside of it + RuntimeException e = expectThrows(RuntimeException.class, withContext::run); + assertEquals("from onAfter", e.getMessage()); + assertNull(e.getCause()); + + // but not after + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("foo")); + assertTrue(threadContext.isDefaultContext()); + } + } + /** * Sometimes wraps a Runnable in an AbstractRunnable. 
*/ diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java index cce349f417c..cbcff431c2b 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; @@ -75,4 +76,30 @@ public class XContentParserTests extends ESTestCase { assertThat(e.getMessage(), containsString("Failed to parse list")); } } + + public void testReadMapStrings() throws IOException { + Map map = readMapStrings("{\"foo\": {\"kbar\":\"vbar\"}}"); + assertThat(map.get("kbar"), equalTo("vbar")); + assertThat(map.size(), equalTo(1)); + map = readMapStrings("{\"foo\": {\"kbar\":\"vbar\", \"kbaz\":\"vbaz\"}}"); + assertThat(map.get("kbar"), equalTo("vbar")); + assertThat(map.get("kbaz"), equalTo("vbaz")); + assertThat(map.size(), equalTo(2)); + map = readMapStrings("{\"foo\": {}}"); + assertThat(map.size(), equalTo(0)); + } + + @SuppressWarnings("unchecked") + private static Map readMapStrings(String source) throws IOException { + try (XContentParser parser = XContentType.JSON.xContent().createParser(source)) { + XContentParser.Token token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.START_OBJECT)); + token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("foo")); + token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.START_OBJECT)); + return randomBoolean() ? 
parser.mapStringsOrdered() : parser.mapStrings(); + } + } } diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 28775defe45..a2001504f19 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -18,23 +18,47 @@ */ package org.elasticsearch.discovery; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import java.util.function.Supplier; -import org.elasticsearch.common.inject.ModuleTestCase; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.discovery.zen.ZenPing; import org.elasticsearch.plugins.DiscoveryPlugin; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.NoopDiscovery; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.junit.After; +import org.junit.Before; +import org.mockito.Mock; +import org.mockito.Mockito; -public class DiscoveryModuleTests extends ModuleTestCase { +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; - public interface DummyDiscoPlugin extends DiscoveryPlugin { +public class DiscoveryModuleTests 
extends ESTestCase { + + private TransportService transportService; + private ClusterService clusterService; + private ThreadPool threadPool; + + public interface DummyHostsProviderPlugin extends DiscoveryPlugin { Map> impl(); @Override default Map> getZenHostsProviders(TransportService transportService, @@ -43,49 +67,96 @@ public class DiscoveryModuleTests extends ModuleTestCase { } } - public void testRegisterDefaults() { - Settings settings = Settings.EMPTY; - DiscoveryModule module = new DiscoveryModule(settings, null, null, Collections.emptyList()); - assertBinding(module, Discovery.class, ZenDiscovery.class); + public interface DummyDiscoveryPlugin extends DiscoveryPlugin { + Map> impl(); + @Override + default Map> getDiscoveryTypes(ThreadPool threadPool, TransportService transportService, + ClusterService clusterService, UnicastHostsProvider hostsProvider) { + return impl(); + } + } + + @Before + public void setupDummyServices() { + transportService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, null, null); + clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + threadPool = mock(ThreadPool.class); + } + + @After + public void clearDummyServices() throws IOException { + IOUtils.close(transportService); + } + + private DiscoveryModule newModule(Settings settings, List plugins) { + return new DiscoveryModule(settings, threadPool, transportService, null, clusterService, plugins); + } + + public void testDefaults() { + DiscoveryModule module = newModule(Settings.EMPTY, Collections.emptyList()); + assertTrue(module.getDiscovery() instanceof ZenDiscovery); + } + + public void testLazyConstructionDiscovery() { + DummyDiscoveryPlugin plugin = () -> Collections.singletonMap("custom", + () -> { throw new AssertionError("created discovery type which was not 
selected"); }); + newModule(Settings.EMPTY, Collections.singletonList(plugin)); } public void testRegisterDiscovery() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "custom").build(); - DummyDiscoPlugin plugin = () -> Collections.singletonMap("custom", () -> Collections::emptyList); - DiscoveryModule module = new DiscoveryModule(settings, null, null, Collections.singletonList(plugin)); - module.addDiscoveryType("custom", NoopDiscovery.class); - assertBinding(module, Discovery.class, NoopDiscovery.class); + DummyDiscoveryPlugin plugin = () -> Collections.singletonMap("custom", NoopDiscovery::new); + DiscoveryModule module = newModule(settings, Collections.singletonList(plugin)); + assertTrue(module.getDiscovery() instanceof NoopDiscovery); + } + + public void testUnknownDiscovery() { + Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "dne").build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + newModule(settings, Collections.emptyList())); + assertEquals("Unknown discovery type [dne]", e.getMessage()); + } + + public void testDuplicateDiscovery() { + DummyDiscoveryPlugin plugin1 = () -> Collections.singletonMap("dup", () -> null); + DummyDiscoveryPlugin plugin2 = () -> Collections.singletonMap("dup", () -> null); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + newModule(Settings.EMPTY, Arrays.asList(plugin1, plugin2))); + assertEquals("Cannot register discovery type [dup] twice", e.getMessage()); } public void testHostsProvider() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "custom").build(); final UnicastHostsProvider provider = Collections::emptyList; - DummyDiscoPlugin plugin = () -> Collections.singletonMap("custom", () -> provider); - DiscoveryModule module = new DiscoveryModule(settings, null, null, Collections.singletonList(plugin)); - 
assertInstanceBinding(module, UnicastHostsProvider.class, instance -> instance == provider); - } - - public void testHostsProviderBwc() { - Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "custom").build(); - final UnicastHostsProvider provider = Collections::emptyList; - DummyDiscoPlugin plugin = () -> Collections.singletonMap("custom", () -> provider); - DiscoveryModule module = new DiscoveryModule(settings, null, null, Collections.singletonList(plugin)); - module.addDiscoveryType("custom", NoopDiscovery.class); - assertInstanceBinding(module, UnicastHostsProvider.class, instance -> instance == provider); + AtomicBoolean created = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("custom", () -> { + created.set(true); + return Collections::emptyList; + }); + newModule(settings, Collections.singletonList(plugin)); + assertTrue(created.get()); } public void testUnknownHostsProvider() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "dne").build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - new DiscoveryModule(settings, null, null, Collections.emptyList())); + newModule(settings, Collections.emptyList())); assertEquals("Unknown zen hosts provider [dne]", e.getMessage()); } public void testDuplicateHostsProvider() { - DummyDiscoPlugin plugin1 = () -> Collections.singletonMap("dup", () -> null); - DummyDiscoPlugin plugin2 = () -> Collections.singletonMap("dup", () -> null); + DummyHostsProviderPlugin plugin1 = () -> Collections.singletonMap("dup", () -> null); + DummyHostsProviderPlugin plugin2 = () -> Collections.singletonMap("dup", () -> null); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - new DiscoveryModule(Settings.EMPTY, null, null, Arrays.asList(plugin1, plugin2))); - assertEquals("Cannot specify zen hosts provider [dup] twice", e.getMessage()); + 
newModule(Settings.EMPTY, Arrays.asList(plugin1, plugin2))); + assertEquals("Cannot register zen hosts provider [dup] twice", e.getMessage()); + } + + public void testLazyConstructionHostsProvider() { + DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("custom", + () -> { throw new AssertionError("created hosts provider which was not selected"); }); + newModule(Settings.EMPTY, Collections.singletonList(plugin)); } } diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index ca4b1c9b120..22844e05881 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -43,6 +43,8 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.ClusterStateStatus; +import org.elasticsearch.cluster.service.ClusterServiceState; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; @@ -65,6 +67,7 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.disruption.IntermittentLongGCDisruption; import org.elasticsearch.test.disruption.LongGCDisruption; @@ -127,14 +130,10 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { private 
ClusterDiscoveryConfiguration discoveryConfig; - @Override - protected boolean addMockZenPings() { - return false; - } - @Override protected Settings nodeSettings(int nodeOrdinal) { - return discoveryConfig.nodeSettings(nodeOrdinal); + return Settings.builder().put(discoveryConfig.nodeSettings(nodeOrdinal)) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false).build(); } @Before @@ -173,7 +172,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } @Override - protected void beforeIndexDeletion() throws IOException { + protected void beforeIndexDeletion() throws Exception { if (disableBeforeIndexDeletion == false) { super.beforeIndexDeletion(); } @@ -194,7 +193,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { ensureStableCluster(numberOfNodes); // TODO: this is a temporary solution so that nodes will not base their reaction to a partition based on previous successful results - ZenPing zenPing = internalCluster().getInstance(ZenPing.class); + ZenPing zenPing = ((TestZenDiscovery)internalCluster().getInstance(Discovery.class)).getZenPing(); if (zenPing instanceof UnicastZenPing) { ((UnicastZenPing) zenPing).clearTemporalResponses(); } @@ -854,7 +853,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // Forcefully clean temporal response lists on all nodes. Otherwise the node in the unicast host list // includes all the other nodes that have pinged it and the issue doesn't manifest - ZenPing zenPing = internalCluster().getInstance(ZenPing.class); + ZenPing zenPing = ((TestZenDiscovery)internalCluster().getInstance(Discovery.class)).getZenPing(); if (zenPing instanceof UnicastZenPing) { ((UnicastZenPing) zenPing).clearTemporalResponses(); } @@ -891,7 +890,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // Forcefully clean temporal response lists on all nodes. 
Otherwise the node in the unicast host list // includes all the other nodes that have pinged it and the issue doesn't manifest - ZenPing zenPing = internalCluster().getInstance(ZenPing.class); + ZenPing zenPing = ((TestZenDiscovery)internalCluster().getInstance(Discovery.class)).getZenPing(); if (zenPing instanceof UnicastZenPing) { ((UnicastZenPing) zenPing).clearTemporalResponses(); } @@ -1201,9 +1200,9 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // Don't restart the master node until we know the index deletion has taken effect on master and the master eligible node. assertBusy(() -> { for (String masterNode : allMasterEligibleNodes) { - final ClusterState masterState = internalCluster().clusterService(masterNode).state(); - assertTrue("index not deleted on " + masterNode, masterState.metaData().hasIndex(idxName) == false && - masterState.status() == ClusterState.ClusterStateStatus.APPLIED); + final ClusterServiceState masterState = internalCluster().clusterService(masterNode).clusterServiceState(); + assertTrue("index not deleted on " + masterNode, masterState.getClusterState().metaData().hasIndex(idxName) == false && + masterState.getClusterStateStatus() == ClusterStateStatus.APPLIED); } }); internalCluster().restartNode(masterNode1, InternalTestCluster.EMPTY_CALLBACK); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index 88cf23fe938..acc5d4e8018 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -19,6 +19,16 @@ package org.elasticsearch.discovery.zen; +import java.io.Closeable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import 
java.util.stream.Collectors; + import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; @@ -34,22 +44,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.zen.PublishClusterStateActionTests.AssertingAckListener; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.discovery.MockZenPing; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.io.Closeable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.discovery.zen.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING; @@ -269,8 +268,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase { } private ZenDiscovery buildZenDiscovery(Settings settings, TransportService service, ClusterService clusterService, ThreadPool threadPool) { - ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - ZenDiscovery zenDiscovery = new ZenDiscovery(settings, threadPool, service, clusterService, clusterSettings, new MockZenPing(settings)); + ZenDiscovery zenDiscovery = new ZenDiscovery(settings, threadPool, service, clusterService, Collections::emptyList); zenDiscovery.start(); return zenDiscovery; } diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 1573e552849..4e1ff3f8678 100644 
--- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -53,6 +53,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.stream.IntStream; @@ -356,7 +357,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { .setSource(jsonBuilder().startObject().startObject("type2").endObject().endObject()) .execute().actionGet(); client.admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() @@ -383,7 +384,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { ClusterState state = client().admin().cluster().prepareState().execute().actionGet().getState(); assertThat(state.metaData().index("test").mapping("type2"), notNullValue()); - assertThat(state.metaData().templates().get("template_1").template(), equalTo("te*")); + assertThat(state.metaData().templates().get("template_1").patterns(), equalTo(Collections.singletonList("te*"))); assertThat(state.metaData().index("test").getAliases().get("test_alias"), notNullValue()); assertThat(state.metaData().index("test").getAliases().get("test_alias").filter(), notNullValue()); } diff --git a/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java b/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java index cc9f9108748..0f72e72f6a2 100644 --- a/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java +++ b/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java @@ -109,7 +109,7 @@ public class WaitUntilRefreshIT extends ESIntegTestCase { 
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "cat")).get(), "2"); // Update-becomes-delete with RefreshPolicy.WAIT_UNTIL - update = client().prepareUpdate("test", "test", "2").setScript(new Script("delete_plz", ScriptType.INLINE, "native", emptyMap())) + update = client().prepareUpdate("test", "test", "2").setScript(new Script(ScriptType.INLINE, "native", "delete_plz", emptyMap())) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get(); assertEquals(2, update.getVersion()); assertFalse("request shouldn't have forced a refresh", update.forcedRefresh()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index a80f94845d1..cf6335c808a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -25,19 +25,15 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; import 
org.junit.Before; import java.io.IOException; @@ -354,4 +350,39 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, defaultMapper.mappingSource().toString()); } + + /** + * Test that time zones are correctly parsed by the {@link DateFieldMapper}. + * There is a known bug with Joda 2.9.4 reported in https://github.com/JodaOrg/joda-time/issues/373. + */ + public void testTimeZoneParsing() throws Exception { + final String timeZonePattern = "yyyy-MM-dd" + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'"); + + String mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "date") + .field("format", timeZonePattern) + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + final DateTimeZone randomTimeZone = randomBoolean() ? 
DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone(); + final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate)) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + + assertEquals(randomDate.withZone(DateTimeZone.UTC).getMillis(), fields[0].numericValue().longValue()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java index ca4f7097bfd..e6d5769467b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; +import java.util.Collections; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; @@ -129,7 +130,7 @@ public class DynamicMappingIT extends ESIntegTestCase { public void testAutoCreateWithDisabledDynamicMappings() throws Exception { assertAcked(client().admin().indices().preparePutTemplate("my_template") .setCreate(true) - .setTemplate("index_*") + .setPatterns(Collections.singletonList("index_*")) .addMapping("foo", "field", "type=keyword") .setSettings(Settings.builder().put("index.mapper.dynamic", false).build()) .get()); diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 607415d7627..2e1f70ab5f5 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -320,15 +320,14 @@ public class InnerHitBuilderTests extends ESTestCase { static SearchSourceBuilder.ScriptField randomScript() { ScriptType randomScriptType = randomFrom(ScriptType.values()); - Map randomMap = null; + Map randomMap = new HashMap<>(); if (randomBoolean()) { - randomMap = new HashMap<>(); int numEntries = randomIntBetween(0, 32); for (int i = 0; i < numEntries; i++) { randomMap.put(String.valueOf(i), randomAsciiOfLength(16)); } } - Script script = new Script(randomAsciiOfLength(128), randomScriptType, randomAsciiOfLengthBetween(1, 4),randomMap); + Script script = new Script(randomScriptType, randomAsciiOfLengthBetween(1, 4), randomAsciiOfLength(128), randomMap); return new SearchSourceBuilder.ScriptField(randomAsciiOfLengthBetween(1, 32), script, randomBoolean()); } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java index 3e1e74e8ebd..97adfad95dc 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java @@ -293,13 +293,11 @@ public class QueryDSLDocumentationTests extends ESTestCase { new Script("doc['num1'].value > 1") ); - Map parameters = new HashMap<>(); + Map parameters = new HashMap<>(); parameters.put("param1", 5); scriptQuery( new Script( - "mygroovyscript", - ScriptType.FILE, - "groovy", + ScriptType.FILE, "groovy", "mygroovyscript", parameters) ); diff --git a/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java index 1eb2b23dceb..3be16e27c48 100644 --- a/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java 
@@ -39,7 +39,7 @@ public class ScriptQueryBuilderTests extends AbstractQueryTestCase params = Collections.emptyMap(); - return new ScriptQueryBuilder(new Script(script, ScriptType.INLINE, MockScriptEngine.NAME, params)); + return new ScriptQueryBuilder(new Script(ScriptType.INLINE, MockScriptEngine.NAME, script, params)); } @Override @@ -67,8 +67,7 @@ public class ScriptQueryBuilderTests extends AbstractQueryTestCase getObjectsHoldingArbitraryContent() { //script_score.script.params can contain arbitrary parameters. no error is expected when //adding additional objects within the params object. - return Collections.singleton(Script.ScriptField.PARAMS.getPreferredName()); + return Collections.singleton(Script.PARAMS_PARSE_FIELD.getPreferredName()); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index a8a1de059e8..98c6ac00344 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -31,6 +31,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.TestUtil; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; @@ -42,6 +44,7 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; @@ -252,7 +255,12 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase field = 
queryBuilder.fields().entrySet().iterator().next(); assertTermOrBoostQuery(query, field.getKey(), queryBuilder.value(), field.getValue()); } else if (queryBuilder.fields().size() == 0) { - assertTermQuery(query, MetaData.ALL, queryBuilder.value()); + MapperService ms = context.mapperService(); + if (ms.allEnabled()) { + assertTermQuery(query, MetaData.ALL, queryBuilder.value()); + } else { + assertThat(query.getClass(), equalTo(MatchNoDocsQuery.class)); + } } else { fail("Encountered lucene query type we do not have a validation implementation for in our " + SimpleQueryStringBuilderTests.class.getSimpleName()); @@ -398,4 +406,19 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase parseQuery(json)); + assertThat(e.getMessage(), + containsString("cannot use [all_fields] parameter in conjunction with [fields]")); + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index 50a7382e404..77edf2ed2f8 100644 --- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -109,7 +109,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase getObjectsHoldingArbitraryContent() { //script_score.script.params can contain arbitrary parameters. no error is expected when adding additional objects //within the params object. Score functions get parsed in the data nodes, so they are not validated in the coord node. 
- return new HashSet<>(Arrays.asList(Script.ScriptField.PARAMS.getPreferredName(), ExponentialDecayFunctionBuilder.NAME, + return new HashSet<>(Arrays.asList(Script.PARAMS_PARSE_FIELD.getPreferredName(), ExponentialDecayFunctionBuilder.NAME, LinearDecayFunctionBuilder.NAME, GaussDecayFunctionBuilder.NAME)); } @@ -169,7 +169,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase params = Collections.emptyMap(); functionBuilder = new ScriptScoreFunctionBuilder( - new Script(script, ScriptType.INLINE, MockScriptEngine.NAME, params)); + new Script(ScriptType.INLINE, MockScriptEngine.NAME, script, params)); break; case 3: RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilderWithFixedSeed(); diff --git a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index cb9eb5a85a4..985116334a5 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -367,6 +367,13 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase protected PrimaryResult performOnPrimary(IndexShard primary, IndexRequest request) throws Exception { final Engine.IndexResult indexResult = executeIndexRequestOnPrimary(request, primary, null); + if (indexResult.hasFailure() == false) { + // update the version on request so it will happen on the replicas + final long version = indexResult.getVersion(); + request.version(version); + request.versionType(request.versionType().versionTypeForReplicationAndRecovery()); + assert request.versionType().validateVersionForWrites(request.version()); + } request.primaryTerm(primary.getPrimaryTerm()); TransportWriteActionTestHelper.performPostWriteActions(primary, request, indexResult.getTranslogLocation(), logger); 
IndexResponse response = new IndexResponse(primary.shardId(), request.type(), request.id(), indexResult.getVersion(), diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 577b20c1fae..9750cd35d01 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -35,15 +35,27 @@ import org.elasticsearch.gateway.GatewayMetaState; import org.elasticsearch.gateway.LocalAllocateDangledIndices; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; +import org.elasticsearch.index.similarity.BM25SimilarityProvider; +import org.elasticsearch.indices.IndicesService.ShardDeletionCheckResult; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -52,6 +64,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import 
static org.hamcrest.Matchers.not; public class IndicesServiceTests extends ESSingleNodeTestCase { @@ -64,6 +77,30 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { return getInstanceFromNode(NodeEnvironment.class); } + @Override + protected Collection> getPlugins() { + ArrayList> plugins = new ArrayList<>(super.getPlugins()); + plugins.add(TestPlugin.class); + return plugins; + } + + public static class TestPlugin extends Plugin implements MapperPlugin { + + public TestPlugin() {} + + @Override + public Map getMappers() { + return Collections.singletonMap("fake-mapper", new StringFieldMapper.TypeParser()); + } + + @Override + public void onIndexModule(IndexModule indexModule) { + super.onIndexModule(indexModule); + indexModule.addSimilarity("fake-similarity", BM25SimilarityProvider::new); + } + } + + @Override protected boolean resetNodeAfterTest() { return true; @@ -92,16 +129,19 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { 1).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); ShardId shardId = new ShardId(meta.getIndex(), 0); - assertFalse("no shard location", indicesService.canDeleteShardContent(shardId, indexSettings)); + assertEquals("no shard location", indicesService.canDeleteShardContent(shardId, indexSettings), + ShardDeletionCheckResult.NO_FOLDER_FOUND); IndexService test = createIndex("test"); shardId = new ShardId(test.index(), 0); assertTrue(test.hasShard(0)); - assertFalse("shard is allocated", indicesService.canDeleteShardContent(shardId, test.getIndexSettings())); + assertEquals("shard is allocated", indicesService.canDeleteShardContent(shardId, test.getIndexSettings()), + ShardDeletionCheckResult.STILL_ALLOCATED); test.removeShard(0, "boom"); - assertTrue("shard is removed", indicesService.canDeleteShardContent(shardId, test.getIndexSettings())); + assertEquals("shard is removed", indicesService.canDeleteShardContent(shardId, test.getIndexSettings()), + 
ShardDeletionCheckResult.FOLDER_FOUND_CAN_DELETE); ShardId notAllocated = new ShardId(test.index(), 100); - assertFalse("shard that was never on this node should NOT be deletable", - indicesService.canDeleteShardContent(notAllocated, test.getIndexSettings())); + assertEquals("shard that was never on this node should NOT be deletable", + indicesService.canDeleteShardContent(notAllocated, test.getIndexSettings()), ShardDeletionCheckResult.NO_FOLDER_FOUND); } public void testDeleteIndexStore() throws Exception { @@ -324,4 +364,26 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { } } + /** + * Tests that teh {@link MapperService} created by {@link IndicesService#createIndexMapperService(IndexMetaData)} contains + * custom types and similarities registered by plugins + */ + public void testStandAloneMapperServiceWithPlugins() throws IOException { + final String indexName = "test"; + final Index index = new Index(indexName, UUIDs.randomBase64UUID()); + final IndicesService indicesService = getIndicesService(); + final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) + .put(IndexModule.SIMILARITY_SETTINGS_PREFIX + ".test.type", "fake-similarity") + .build(); + final IndexMetaData indexMetaData = new IndexMetaData.Builder(index.getName()) + .settings(idxSettings) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + MapperService mapperService = indicesService.createIndexMapperService(indexMetaData); + assertNotNull(mapperService.documentMapperParser().parserContext("type").typeParser("fake-mapper")); + assertThat(mapperService.documentMapperParser().parserContext("type").getSimilarity("test"), + instanceOf(BM25SimilarityProvider.class)); + } } diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java 
b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 7054b0085d0..7b87a62288f 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.memory.breaker; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.LeafReader; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -39,16 +40,20 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.basic.SearchWithRandomExceptionsIT; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.engine.MockEngineSupport; import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Random; +import java.util.Set; import java.util.concurrent.ExecutionException; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; @@ -60,7 +65,14 @@ import static org.hamcrest.Matchers.equalTo; public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class, MockEngineFactoryPlugin.class); + return Arrays.asList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class); + } + + @Override + protected Collection> getMockPlugins() { + Set> mocks = new 
HashSet<>(super.getMockPlugins()); + mocks.remove(MockEngineFactoryPlugin.class); + return mocks; } public void testBreakerWithRandomExceptions() throws IOException, InterruptedException, ExecutionException { @@ -200,14 +212,19 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope); public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope); - public static class TestPlugin extends Plugin { + public static class TestPlugin extends MockEngineFactoryPlugin { @Override public List> getSettings() { - return Arrays.asList(EXCEPTION_TOP_LEVEL_RATIO_SETTING, EXCEPTION_LOW_LEVEL_RATIO_SETTING); + List> settings = new ArrayList<>(); + settings.addAll(super.getSettings()); + settings.add(EXCEPTION_TOP_LEVEL_RATIO_SETTING); + settings.add(EXCEPTION_LOW_LEVEL_RATIO_SETTING); + return settings; } - public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) { - module.setReaderClass(RandomExceptionDirectoryReaderWrapper.class); + @Override + protected Class getReaderWrapperClass() { + return RandomExceptionDirectoryReaderWrapper.class; } } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index e48d5fe2628..0807ed4389c 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -32,15 +32,16 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetaData; +import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.MergeSchedulerConfig; +import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; import org.elasticsearch.indices.IndicesService; @@ -64,15 +65,16 @@ import static org.hamcrest.Matchers.nullValue; public class UpdateSettingsIT extends ESIntegTestCase { - public void testInvalidDynamicUpdate() { createIndex("test"); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put("index.dummy", "boom") - ) - .execute().actionGet()); + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder().put("index.dummy", "boom")) + .execute() + .actionGet()); assertEquals(exception.getCause().getMessage(), "this setting goes boom"); IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertNotEquals(indexMetaData.getSettings().get("index.dummy"), "invalid dynamic value"); @@ -103,12 +105,13 @@ public class UpdateSettingsIT extends ESIntegTestCase { public void testResetDefault() { createIndex("test"); - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put("index.refresh_interval", -1) - .put("index.translog.flush_threshold_size", "1024b") - ) - .execute().actionGet(); + client() + .admin() + .indices() + 
.prepareUpdateSettings("test") + .setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.translog.flush_threshold_size", "1024b")) + .execute() + .actionGet(); IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertEquals(indexMetaData.getSettings().get("index.refresh_interval"), "-1"); for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { @@ -118,11 +121,13 @@ public class UpdateSettingsIT extends ESIntegTestCase { assertEquals(indexService.getIndexSettings().getFlushThresholdSize().getBytes(), 1024); } } - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder() - .putNull("index.refresh_interval") - ) - .execute().actionGet(); + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder().putNull("index.refresh_interval")) + .execute() + .actionGet(); indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertNull(indexMetaData.getSettings().get("index.refresh_interval")); for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { @@ -136,12 +141,15 @@ public class UpdateSettingsIT extends ESIntegTestCase { public void testOpenCloseUpdateSettings() throws Exception { createIndex("test"); try { - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put("index.refresh_interval", -1) // this one can change - .put("index.fielddata.cache", "none") // this one can't - ) - .execute().actionGet(); + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder() + .put("index.refresh_interval", -1) // this one can change + .put("index.fielddata.cache", "none")) // this one can't + .execute() + .actionGet(); fail(); } catch (IllegalArgumentException e) { // all is well @@ -156,11 +164,13 @@ public 
class UpdateSettingsIT extends ESIntegTestCase { assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), nullValue()); assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), nullValue()); - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put("index.refresh_interval", -1) // this one can change - ) - .execute().actionGet(); + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder().put("index.refresh_interval", -1)) // this one can change + .execute() + .actionGet(); indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), equalTo("-1")); @@ -171,29 +181,43 @@ public class UpdateSettingsIT extends ESIntegTestCase { // now close the index, change the non dynamic setting, and see that it applies // Wait for the index to turn green before attempting to close it - ClusterHealthResponse health = client().admin().cluster().prepareHealth().setTimeout("30s").setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); + ClusterHealthResponse health = + client() + .admin() + .cluster() + .prepareHealth() + .setTimeout("30s") + .setWaitForEvents(Priority.LANGUID) + .setWaitForGreenStatus() + .execute() + .actionGet(); assertThat(health.isTimedOut(), equalTo(false)); client().admin().indices().prepareClose("test").execute().actionGet(); try { - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - ) - .execute().actionGet(); + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)) + .execute() + .actionGet(); fail("can't change number of replicas on a closed index"); } catch (IllegalArgumentException ex) { 
assertTrue(ex.getMessage(), ex.getMessage().startsWith("Can't update [index.number_of_replicas] on closed indices [[test/")); assertTrue(ex.getMessage(), ex.getMessage().endsWith("]] - can leave index in an unopenable state")); // expected } - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put("index.refresh_interval", "1s") // this one can change - .put("index.fielddata.cache", "none") // this one can't - ) - .execute().actionGet(); + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder() + .put("index.refresh_interval", "1s") // this one can change + .put("index.fielddata.cache", "none")) // this one can't + .execute() + .actionGet(); indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), equalTo("1s")); @@ -209,15 +233,14 @@ public class UpdateSettingsIT extends ESIntegTestCase { createIndex("test"); client().prepareIndex("test", "type", "1").setSource("f", 1).get(); // set version to 1 client().prepareDelete("test", "type", "1").get(); // sets version to 2 - client().prepareIndex("test", "type", "1").setSource("f", 2).setVersion(2).get(); // delete is still in cache this should work & set version to 3 - client().admin().indices().prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put("index.gc_deletes", 0) - ).get(); + // delete is still in cache this should work & set version to 3 + client().prepareIndex("test", "type", "1").setSource("f", 2).setVersion(2).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.gc_deletes", 0)).get(); client().prepareDelete("test", "type", "1").get(); // sets version to 4 Thread.sleep(300); // wait for cache time to change TODO: this needs to be solved better. To be discussed. 
- assertThrows(client().prepareIndex("test", "type", "1").setSource("f", 3).setVersion(4), VersionConflictEngineException.class); // delete is should not be in cache + // delete is should not be in cache + assertThrows(client().prepareIndex("test", "type", "1").setSource("f", 3).setVersion(4), VersionConflictEngineException.class); } @@ -263,9 +286,10 @@ public class UpdateSettingsIT extends ESIntegTestCase { .admin() .indices() .prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") - .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), "1mb")) + .setSettings( + Settings.builder() + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") + .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), "1mb")) .get(); // Make sure setting says it is in fact changed: @@ -303,8 +327,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { .admin() .indices() .prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none")) + .setSettings(Settings.builder().put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none")) .get(); // Optimize does a waitForMerges, which we must do to make sure all in-flight (throttled) merges finish: @@ -364,25 +387,31 @@ public class UpdateSettingsIT extends ESIntegTestCase { @Override public void append(LogEvent event) { String message = event.getMessage().getFormattedMessage(); - if (event.getLevel() == Level.TRACE && - event.getLoggerName().endsWith("lucene.iw")) { + if (event.getLevel() == Level.TRACE && event.getLoggerName().endsWith("lucene.iw")) { } - if (event.getLevel() == Level.INFO && message.contains("updating [index.merge.scheduler.max_thread_count] from [10000] to [1]")) { + if (event.getLevel() == Level.INFO + && message.contains("updating [index.merge.scheduler.max_thread_count] from [10000] to [1]")) { 
sawUpdateMaxThreadCount = true; } - if (event.getLevel() == Level.INFO && message.contains("updating [index.merge.scheduler.auto_throttle] from [true] to [false]")) { + if (event.getLevel() == Level.INFO + && message.contains("updating [index.merge.scheduler.auto_throttle] from [true] to [false]")) { sawUpdateAutoThrottle = true; } } + @Override + public boolean ignoreExceptions() { + return false; + } + } - public void testUpdateAutoThrottleSettings() throws IllegalAccessException { + public void testUpdateAutoThrottleSettings() throws Exception { MockAppender mockAppender = new MockAppender("testUpdateAutoThrottleSettings"); mockAppender.start(); Logger rootLogger = LogManager.getRootLogger(); - Level savedLevel = rootLogger.getLevel(); Loggers.addAppender(rootLogger, mockAppender); + Level savedLevel = rootLogger.getLevel(); Loggers.setLevel(rootLogger, Level.TRACE); try { @@ -395,18 +424,24 @@ public class UpdateSettingsIT extends ESIntegTestCase { .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "2") - .put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), "true") - )); + .put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), "true"))); // Disable auto throttle: client() .admin() .indices() .prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), "false")) + .setSettings(Settings.builder().put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), "false")) .get(); + // if a node has processed the cluster state update but not yet returned from the update task, it might still log messages; + // these log messages will race with the stopping of the appender so we wait to ensure these tasks are done processing + assertBusy(() -> { + for (final ClusterService service : internalCluster().getInstances(ClusterService.class)) { + 
assertThat(service.numberOfPendingTasks(), equalTo(0)); + } + }); + // Make sure we log the change: assertTrue(mockAppender.sawUpdateAutoThrottle); @@ -414,9 +449,11 @@ public class UpdateSettingsIT extends ESIntegTestCase { GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey()), equalTo("false")); } finally { - Loggers.removeAppender(rootLogger, mockAppender); - mockAppender.stop(); Loggers.setLevel(rootLogger, savedLevel); + Loggers.removeAppender(rootLogger, mockAppender); + // don't call stop here some node might still use this reference at this point causing tests to fail. + // this is only relevant in integ tests, unittest can control what uses a logger and what doesn't + // mockAppender.stop(); } } @@ -445,22 +482,24 @@ public class UpdateSettingsIT extends ESIntegTestCase { )); { - UpdateSettingsRequestBuilder updateBuilder = client().admin().indices() - .prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1000") - ); + UpdateSettingsRequestBuilder updateBuilder = + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder().put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1000")); exc = expectThrows(IllegalArgumentException.class, () -> updateBuilder.get()); assertThat(exc.getMessage(), equalTo("maxThreadCount (= 1000) should be <= maxMergeCount (= 100)")); } { - UpdateSettingsRequestBuilder updateBuilder = client().admin().indices() - .prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "10") - ); + UpdateSettingsRequestBuilder updateBuilder = + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder().put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "10")); 
exc = expectThrows(IllegalArgumentException.class, () -> updateBuilder.get()); assertThat(exc.getMessage(), equalTo("maxThreadCount (= 100) should be <= maxMergeCount (= 10)")); @@ -468,7 +507,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { } // #6882: make sure we can change index.merge.scheduler.max_thread_count live - public void testUpdateMergeMaxThreadCount() throws IllegalAccessException { + public void testUpdateMergeMaxThreadCount() throws Exception { MockAppender mockAppender = new MockAppender("testUpdateMergeMaxThreadCount"); mockAppender.start(); Logger rootLogger = LogManager.getRootLogger(); @@ -485,8 +524,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "10000") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "10000") - )); + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "10000"))); assertFalse(mockAppender.sawUpdateMaxThreadCount); // Now make a live change to reduce allowed merge threads: @@ -494,11 +532,17 @@ public class UpdateSettingsIT extends ESIntegTestCase { .admin() .indices() .prepareUpdateSettings("test") - .setSettings(Settings.builder() - .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") - ) + .setSettings(Settings.builder().put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1")) .get(); + // if a node has processed the cluster state update but not yet returned from the update task, it might still log messages; + // these log messages will race with the stopping of the appender so we wait to ensure these tasks are done processing + assertBusy(() -> { + for (final ClusterService service : internalCluster().getInstances(ClusterService.class)) { + assertThat(service.numberOfPendingTasks(), equalTo(0)); + } + }); + // Make sure we log the 
change: assertTrue(mockAppender.sawUpdateMaxThreadCount); @@ -507,9 +551,11 @@ public class UpdateSettingsIT extends ESIntegTestCase { assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey()), equalTo("1")); } finally { - Loggers.removeAppender(rootLogger, mockAppender); - mockAppender.stop(); Loggers.setLevel(rootLogger, savedLevel); + Loggers.removeAppender(rootLogger, mockAppender); + // don't call stop here some node might still use this reference at this point causing tests to fail. + // this is only relevant in integ tests, unittest can control what uses a logger and what doesn't + // mockAppender.stop(); } } @@ -538,4 +584,5 @@ public class UpdateSettingsIT extends ESIntegTestCase { } } } + } diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 8582ca0e02f..c55fc514332 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; @@ -50,6 +51,7 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -76,8 +78,9 @@ import static 
org.hamcrest.Matchers.instanceOf; public class RareClusterStateIT extends ESIntegTestCase { @Override - protected boolean addMockZenPings() { - return false; + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false).build(); } @Override @@ -322,7 +325,12 @@ public class RareClusterStateIT extends ESIntegTestCase { // Here we want to test that everything goes well if the mappings that // are needed for a document are not available on the replica at the // time of indexing it - final List nodeNames = internalCluster().startNodesAsync(2).get(); + final List nodeNames = internalCluster().startNodesAsync(2, + Settings.builder() + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) + .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "30s") // explicitly set so it won't default to publish timeout + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "0s") // don't wait post commit as we are blocking things by design + .build()).get(); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); final String master = internalCluster().getMasterName(); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java index 74370b711d6..7f0e9350488 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java @@ -63,56 +63,22 @@ public class IndicesStoreTests extends ESTestCase { NOT_STARTED_STATES = set.toArray(new ShardRoutingState[set.size()]); } - private static ThreadPool threadPool; - - private IndicesStore indicesStore; private DiscoveryNode localNode; - private ClusterService clusterService; - - @BeforeClass - public static void beforeClass() { - threadPool = new 
TestThreadPool("ShardReplicationTests"); - } - - @AfterClass - public static void afterClass() { - ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); - threadPool = null; - } - @Before - public void before() { + public void createLocalNode() { localNode = new DiscoveryNode("abc", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); - clusterService = createClusterService(threadPool); - TransportService transportService = new TransportService(clusterService.getSettings(), null, null, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); - indicesStore = new IndicesStore(Settings.EMPTY, null, clusterService, transportService, null); - } - - @After - public void tearDown() throws Exception { - super.tearDown(); - clusterService.close(); } public void testShardCanBeDeletedNoShardRouting() throws Exception { - int numShards = randomIntBetween(1, 7); - int numReplicas = randomInt(2); - - ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); - clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); - - assertFalse(indicesStore.shardCanBeDeleted(clusterState.build(), routingTable.build())); + assertFalse(IndicesStore.shardCanBeDeleted(localNode.getId(), routingTable.build())); } public void testShardCanBeDeletedNoShardStarted() throws Exception { int numShards = randomIntBetween(1, 7); int numReplicas = randomInt(2); - ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); - clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); for (int i = 
0; i < numShards; i++) { @@ -128,99 +94,31 @@ public class IndicesStoreTests extends ESTestCase { if (state == ShardRoutingState.UNASSIGNED) { unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null); } - routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", null, j == 0, state, unassignedInfo)); + String relocatingNodeId = state == ShardRoutingState.RELOCATING ? randomAsciiOfLength(10) : null; + routingTable.addShard(TestShardRouting.newShardRouting("test", i, randomAsciiOfLength(10), relocatingNodeId, j == 0, state, unassignedInfo)); } } - assertFalse(indicesStore.shardCanBeDeleted(clusterState.build(), routingTable.build())); + + assertFalse(IndicesStore.shardCanBeDeleted(localNode.getId(), routingTable.build())); } public void testShardCanBeDeletedShardExistsLocally() throws Exception { int numShards = randomIntBetween(1, 7); int numReplicas = randomInt(2); - ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); - clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); - clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.getId()).add(localNode).add(new DiscoveryNode("xyz", - buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT))); IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); int localShardId = randomInt(numShards - 1); for (int i = 0; i < numShards; i++) { - String nodeId = i == localShardId ? localNode.getId() : randomBoolean() ? "abc" : "xyz"; - String relocationNodeId = randomBoolean() ? null : randomBoolean() ? 
localNode.getId() : "xyz"; - routingTable.addShard(TestShardRouting.newShardRouting("test", i, nodeId, relocationNodeId, true, ShardRoutingState.STARTED)); + int localNodeIndex = randomInt(numReplicas); + boolean primaryOnLocalNode = i == localShardId && localNodeIndex == numReplicas; + routingTable.addShard(TestShardRouting.newShardRouting("test", i, primaryOnLocalNode ? localNode.getId() : randomAsciiOfLength(10), true, ShardRoutingState.STARTED)); for (int j = 0; j < numReplicas; j++) { - routingTable.addShard(TestShardRouting.newShardRouting("test", i, nodeId, relocationNodeId, false, ShardRoutingState.STARTED)); + boolean replicaOnLocalNode = i == localShardId && localNodeIndex == j; + routingTable.addShard(TestShardRouting.newShardRouting("test", i, replicaOnLocalNode ? localNode.getId() : randomAsciiOfLength(10), false, ShardRoutingState.STARTED)); } } // Shard exists locally, can't delete shard - assertFalse(indicesStore.shardCanBeDeleted(clusterState.build(), routingTable.build())); - } - - public void testShardCanBeDeletedNodeNotInList() throws Exception { - int numShards = randomIntBetween(1, 7); - int numReplicas = randomInt(2); - - ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); - clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); - clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.getId()).add(localNode)); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); - for (int i = 0; i < numShards; i++) { - String relocatingNodeId = randomBoolean() ? 
null : "def"; - routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", relocatingNodeId, true, ShardRoutingState.STARTED)); - for (int j = 0; j < numReplicas; j++) { - routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", relocatingNodeId, false, ShardRoutingState.STARTED)); - } - } - - // null node -> false - assertFalse(indicesStore.shardCanBeDeleted(clusterState.build(), routingTable.build())); - } - - public void testShardCanBeDeletedNodeVersion() throws Exception { - int numShards = randomIntBetween(1, 7); - int numReplicas = randomInt(2); - - // Most of the times don't test bwc and use current version - final Version nodeVersion = randomBoolean() ? CURRENT : randomVersion(random()); - ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); - clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); - clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.getId()).add(localNode).add(new DiscoveryNode("xyz", - buildNewFakeTransportAddress(), emptyMap(), emptySet(), nodeVersion))); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); - for (int i = 0; i < numShards; i++) { - routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", null, true, ShardRoutingState.STARTED)); - for (int j = 0; j < numReplicas; j++) { - routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", null, false, ShardRoutingState.STARTED)); - } - } - - // shard exist on other node (abc) - assertTrue(indicesStore.shardCanBeDeleted(clusterState.build(), routingTable.build())); - } - - public void testShardCanBeDeletedRelocatingNode() throws Exception { - int numShards = randomIntBetween(1, 7); - int numReplicas = randomInt(2); - - ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); - 
clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); - final Version nodeVersion = randomBoolean() ? CURRENT : randomVersion(random()); - - clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.getId()) - .add(localNode) - .add(new DiscoveryNode("xyz", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT)) - .add(new DiscoveryNode("def", buildNewFakeTransportAddress(), emptyMap(), emptySet(), nodeVersion) // <-- only set relocating, since we're testing that in this test - )); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); - for (int i = 0; i < numShards; i++) { - routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", "def", true, ShardRoutingState.STARTED)); - for (int j = 0; j < numReplicas; j++) { - routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", "def", false, ShardRoutingState.STARTED)); - } - } - - // shard exist on other node (abc and def) - assertTrue(indicesStore.shardCanBeDeleted(clusterState.build(), routingTable.build())); + assertFalse(IndicesStore.shardCanBeDeleted(localNode.getId(), routingTable.build())); } } diff --git a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java index a21624d6e92..3e38a6df187 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import java.io.IOException; +import java.util.Collections; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; import static 
org.hamcrest.Matchers.hasSize; @@ -35,7 +36,7 @@ public class IndexTemplateBlocksIT extends ESIntegTestCase { public void testIndexTemplatesWithBlocks() throws IOException { // creates a simple index template client().admin().indices().preparePutTemplate("template_blocks") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() @@ -50,7 +51,7 @@ public class IndexTemplateBlocksIT extends ESIntegTestCase { assertThat(response.getIndexTemplates(), hasSize(1)); assertBlocked(client().admin().indices().preparePutTemplate("template_blocks_2") - .setTemplate("block*") + .setPatterns(Collections.singletonList("block*")) .setOrder(0) .addAlias(new Alias("alias_1"))); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index b1a4d19894e..612e1d1e16b 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -34,15 +34,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.indices.IndexTemplateAlreadyExistsException; import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; import org.junit.After; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -80,7 +81,7 @@ public class SimpleIndexTemplateIT 
extends ESIntegTestCase { client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setSettings(indexSettings()) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") @@ -90,7 +91,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .get(); client().admin().indices().preparePutTemplate("template_2") - .setTemplate("test*") + .setPatterns(Collections.singletonList("test*")) .setSettings(indexSettings()) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") @@ -100,14 +101,14 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { // test create param assertThrows(client().admin().indices().preparePutTemplate("template_2") - .setTemplate("test*") + .setPatterns(Collections.singletonList("test*")) .setSettings(indexSettings()) .setCreate(true) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field2").field("type", "text").field("store", false).endObject() .endObject().endObject().endObject()) - , IndexTemplateAlreadyExistsException.class + , IllegalArgumentException.class ); response = client().admin().indices().prepareGetTemplates().get(); @@ -152,7 +153,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { final int existingTemplates = admin().cluster().prepareState().execute().actionGet().getState().metaData().templates().size(); logger.info("--> put template_1 and template_2"); client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() @@ -161,7 +162,7 @@ public 
class SimpleIndexTemplateIT extends ESIntegTestCase { .execute().actionGet(); client().admin().indices().preparePutTemplate("template_2") - .setTemplate("test*") + .setPatterns(Collections.singletonList("test*")) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field2").field("type", "text").field("store", "no").endObject() @@ -180,7 +181,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { logger.info("--> put template_1 back"); client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() @@ -202,7 +203,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testThatGetIndexTemplatesWorks() throws Exception { logger.info("--> put template_1"); client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .setVersion(123) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") @@ -215,7 +216,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { GetIndexTemplatesResponse getTemplate1Response = client().admin().indices().prepareGetTemplates("template_1").execute().actionGet(); assertThat(getTemplate1Response.getIndexTemplates(), hasSize(1)); assertThat(getTemplate1Response.getIndexTemplates().get(0), is(notNullValue())); - assertThat(getTemplate1Response.getIndexTemplates().get(0).getTemplate(), is("te*")); + assertThat(getTemplate1Response.getIndexTemplates().get(0).patterns(), is(Collections.singletonList("te*"))); assertThat(getTemplate1Response.getIndexTemplates().get(0).getOrder(), is(0)); 
assertThat(getTemplate1Response.getIndexTemplates().get(0).getVersion(), is(123)); @@ -228,7 +229,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testThatGetIndexTemplatesWithSimpleRegexWorks() throws Exception { logger.info("--> put template_1"); client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() @@ -238,7 +239,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { logger.info("--> put template_2"); client().admin().indices().preparePutTemplate("template_2") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() @@ -248,7 +249,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { logger.info("--> put template3"); client().admin().indices().preparePutTemplate("template3") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() @@ -316,7 +317,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { MapperParsingException e = expectThrows( MapperParsingException.class, () -> client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .addMapping("type1", "abcde") .get()); assertThat(e.getMessage(), containsString("Failed to parse mapping ")); @@ -335,7 +336,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setSettings(Settings.builder().put("does_not_exist", "test")) .get()); assertEquals("unknown setting [index.does_not_exist] please check that any required plugins are" + @@ -353,7 +354,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testIndexTemplateWithAliases() throws Exception { client().admin().indices().preparePutTemplate("template_with_aliases") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .addMapping("type1", "{\"type1\" : {\"properties\" : {\"value\" : {\"type\" : \"text\"}}}}") .addAlias(new Alias("simple_alias")) .addAlias(new Alias("templated_alias-{index}")) @@ -440,7 +441,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testIndexTemplateWithAliasesSource() { client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setAliases( " {\n" + " \"alias1\" : {},\n" + @@ -478,7 +479,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testDuplicateAlias() throws Exception { client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .addAlias(new Alias("my_alias").filter(termQuery("field", "value1"))) .addAlias(new Alias("my_alias").filter(termQuery("field", "value2"))) .get(); @@ -492,7 +493,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testAliasInvalidFilterValidJson() throws Exception { //invalid filter but valid json: put index template works fine, fails during index creation client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .addAlias(new Alias("invalid_alias").filter("{ \"invalid\": {} 
}")).get(); GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates("template_1").get(); @@ -510,7 +511,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testAliasInvalidFilterInvalidJson() throws Exception { //invalid json: put index template fails PutIndexTemplateRequestBuilder putIndexTemplateRequestBuilder = client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .addAlias(new Alias("invalid_alias").filter("abcde")); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -525,7 +526,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { createIndex("index"); client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .addAlias(new Alias("index")).get(); InvalidAliasNameException e = expectThrows(InvalidAliasNameException.class, @@ -535,7 +536,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testAliasEmptyName() throws Exception { PutIndexTemplateRequestBuilder putIndexTemplateRequestBuilder = client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .addAlias(new Alias(" ").indexRouting("1,2,3")); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -545,7 +546,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testAliasWithMultipleIndexRoutings() throws Exception { PutIndexTemplateRequestBuilder putIndexTemplateRequestBuilder = client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .addAlias(new Alias("alias").indexRouting("1,2,3")); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -555,7 +556,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void 
testMultipleAliasesPrecedence() throws Exception { client().admin().indices().preparePutTemplate("template1") - .setTemplate("*") + .setPatterns(Collections.singletonList("*")) .setOrder(0) .addAlias(new Alias("alias1")) .addAlias(new Alias("{index}-alias")) @@ -563,7 +564,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .addAlias(new Alias("alias4")).get(); client().admin().indices().preparePutTemplate("template2") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(1) .addAlias(new Alias("alias1").routing("test")) .addAlias(new Alias("alias3")).get(); @@ -593,27 +594,27 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { public void testStrictAliasParsingInIndicesCreatedViaTemplates() throws Exception { // Indexing into a should succeed, because the field mapping for field 'field' is defined in the test mapping. client().admin().indices().preparePutTemplate("template1") - .setTemplate("a*") + .setPatterns(Collections.singletonList("a*")) .setOrder(0) .addMapping("test", "field", "type=text") .addAlias(new Alias("alias1").filter(termQuery("field", "value"))).get(); // Indexing into b should succeed, because the field mapping for field 'field' is defined in the _default_ mapping and // the test type exists. client().admin().indices().preparePutTemplate("template2") - .setTemplate("b*") + .setPatterns(Collections.singletonList("b*")) .setOrder(0) .addMapping("_default_", "field", "type=text") .addMapping("test") .addAlias(new Alias("alias2").filter(termQuery("field", "value"))).get(); // Indexing into c should succeed, because the field mapping for field 'field' is defined in the _default_ mapping. 
client().admin().indices().preparePutTemplate("template3") - .setTemplate("c*") + .setPatterns(Collections.singletonList("c*")) .setOrder(0) .addMapping("_default_", "field", "type=text") .addAlias(new Alias("alias3").filter(termQuery("field", "value"))).get(); // Indexing into d index should fail, since there is field with name 'field' in the mapping client().admin().indices().preparePutTemplate("template4") - .setTemplate("d*") + .setPatterns(Collections.singletonList("d*")) .setOrder(0) .addAlias(new Alias("alias4").filter(termQuery("field", "value"))).get(); @@ -672,7 +673,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { //Now, a complete mapping with two separated templates is error // base template client().admin().indices().preparePutTemplate("template_1") - .setTemplate("*") + .setPatterns(Collections.singletonList("*")) .setSettings( " {\n" + " \"index\" : {\n" + @@ -690,7 +691,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { // put template using custom_1 analyzer MapperParsingException e = expectThrows(MapperParsingException.class, () -> client().admin().indices().preparePutTemplate("template_2") - .setTemplate("test*") + .setPatterns(Collections.singletonList("test*")) .setCreate(true) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") @@ -709,7 +710,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { Integer version = randomBoolean() ? 
randomInt() : null; assertAcked(client().admin().indices().preparePutTemplate("versioned_template") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setVersion(version) .setOrder(order) .addMapping("test", "field", "type=text") @@ -721,4 +722,46 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { assertThat(response.getIndexTemplates().get(0).getOrder(), equalTo(order)); } + public void testMultipleTemplate() throws IOException { + client().admin().indices().preparePutTemplate("template_1") + .setPatterns(Arrays.asList("a*", "b*")) + .setSettings(indexSettings()) + .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") + .startObject("field1").field("type", "text").field("store", true).endObject() + .startObject("field2").field("type", "keyword").field("store", false).endObject() + .endObject().endObject().endObject()) + .get(); + + client().prepareIndex("ax", "type1", "1") + .setSource("field1", "value1", "field2", "value2") + .setRefreshPolicy(IMMEDIATE).get(); + + client().prepareIndex("bx", "type1", "1") + .setSource("field1", "value1", "field2", "value2") + .setRefreshPolicy(IMMEDIATE).get(); + + ensureGreen(); + + // ax -> matches template + SearchResponse searchResponse = client().prepareSearch("ax") + .setQuery(termQuery("field1", "value1")) + .addStoredField("field1") + .addStoredField("field2") + .execute().actionGet(); + + assertHitCount(searchResponse, 1); + assertEquals("value1", searchResponse.getHits().getAt(0).field("field1").value().toString()); + assertNull(searchResponse.getHits().getAt(0).field("field2")); + + // bx -> matches template + searchResponse = client().prepareSearch("bx") + .setQuery(termQuery("field1", "value1")) + .addStoredField("field1") + .addStoredField("field2") + .execute().actionGet(); + + assertHitCount(searchResponse, 1); + assertEquals("value1", searchResponse.getHits().getAt(0).field("field1").value().toString()); + 
assertNull(searchResponse.getHits().getAt(0).field("field2")); + } } diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index 58042fc768f..f5ae13cf3a0 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -54,7 +54,7 @@ public class FileScriptTests extends ESTestCase { Settings settings = Settings.builder() .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", "false").build(); ScriptService scriptService = makeScriptService(settings); - Script script = new Script("script1", ScriptType.FILE, MockScriptEngine.NAME, null); + Script script = new Script(ScriptType.FILE, MockScriptEngine.NAME, "script1", Collections.emptyMap()); CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertNotNull(compiledScript); MockCompiledScript executable = (MockCompiledScript) compiledScript.compiled(); @@ -69,7 +69,7 @@ public class FileScriptTests extends ESTestCase { .put("script.engine." + MockScriptEngine.NAME + ".file.update", "false") .put("script.engine." 
+ MockScriptEngine.NAME + ".file.ingest", "false").build(); ScriptService scriptService = makeScriptService(settings); - Script script = new Script("script1", ScriptType.FILE, MockScriptEngine.NAME, null); + Script script = new Script(ScriptType.FILE, MockScriptEngine.NAME, "script1", Collections.emptyMap()); for (ScriptContext context : ScriptContext.Standard.values()) { try { scriptService.compile(script, context, Collections.emptyMap()); diff --git a/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java b/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java index f45a6c01f8e..f41d9d7c394 100644 --- a/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java +++ b/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java @@ -683,7 +683,7 @@ public class IndexLookupIT extends ESIntegTestCase { } private Script createScript(String script) { - return new Script(script, ScriptType.INLINE, CustomScriptPlugin.NAME, null); + return new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, script, Collections.emptyMap()); } public void testFlags() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index efff2b0834e..5423103f92d 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -53,8 +53,7 @@ public class NativeScriptTests extends ESTestCase { scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED); ExecutableScript executable = scriptModule.getScriptService().executable( - new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), ScriptContext.Standard.SEARCH, - Collections.emptyMap()); + new Script(ScriptType.INLINE, NativeScriptEngineService.NAME, "my", Collections.emptyMap()), ScriptContext.Standard.SEARCH); assertThat(executable.run().toString(), equalTo("test")); } @@ -80,8 +79,8 @@ public class 
NativeScriptTests extends ESTestCase { scriptContextRegistry, scriptSettings); for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { - assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext, - Collections.emptyMap()), notNullValue()); + assertThat(scriptService.compile(new Script(ScriptType.INLINE, NativeScriptEngineService.NAME, "my", Collections.emptyMap()), + scriptContext, Collections.emptyMap()), notNullValue()); } } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 8346f7fed3e..9578e0c6281 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -58,7 +58,7 @@ public class ScriptContextTests extends ESTestCase { ScriptService scriptService = makeScriptService(); for (ScriptType scriptType : ScriptType.values()) { try { - Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); + Script script = new Script(scriptType, MockScriptEngine.NAME, "1", Collections.emptyMap()); scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalStateException e) { @@ -69,7 +69,7 @@ public class ScriptContextTests extends ESTestCase { public void testCustomScriptContextSettings() throws Exception { ScriptService scriptService = makeScriptService(); - Script script = new Script("1", ScriptType.INLINE, MockScriptEngine.NAME, null); + Script script = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "1", Collections.emptyMap()); try { scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), Collections.emptyMap()); fail("script compilation should have been rejected"); @@ -87,7 +87,7 @@ public class 
ScriptContextTests extends ESTestCase { ScriptService scriptService = makeScriptService(); for (ScriptType scriptType : ScriptType.values()) { try { - Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); + Script script = new Script(scriptType, MockScriptEngine.NAME, "1", Collections.emptyMap()); scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { @@ -106,7 +106,7 @@ public class ScriptContextTests extends ESTestCase { ScriptService scriptService = makeScriptService(); for (ScriptType scriptType : ScriptType.values()) { try { - Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); + Script script = new Script(scriptType, MockScriptEngine.NAME, "1", Collections.emptyMap()); scriptService.compile(script, context, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { diff --git a/core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java b/core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java index b7aeb0db47f..1a4a58e4b1e 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; @@ -60,10 +61,10 @@ public class ScriptFieldIT extends ESIntegTestCase { client().admin().indices().prepareFlush("test").execute().actionGet(); SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) - .addScriptField("int", new Script("int", ScriptType.INLINE, "native", null)) - .addScriptField("float", new Script("float", ScriptType.INLINE, "native", null)) - 
.addScriptField("double", new Script("double", ScriptType.INLINE, "native", null)) - .addScriptField("long", new Script("long", ScriptType.INLINE, "native", null)).execute().actionGet(); + .addScriptField("int", new Script(ScriptType.INLINE, "native", "int", Collections.emptyMap())) + .addScriptField("float", new Script(ScriptType.INLINE, "native", "float", Collections.emptyMap())) + .addScriptField("double", new Script(ScriptType.INLINE, "native", "double", Collections.emptyMap())) + .addScriptField("long", new Script(ScriptType.INLINE, "native", "long", Collections.emptyMap())).execute().actionGet(); assertThat(sr.getHits().hits().length, equalTo(6)); for (SearchHit hit : sr.getHits().getHits()) { Object result = hit.getFields().get("int").getValues().get(0); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 2b14ba3f4d0..fde01e59db2 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -169,7 +169,7 @@ public class ScriptServiceTests extends ESTestCase { Streams.copy("test_file".getBytes("UTF-8"), Files.newOutputStream(testFileWithExt)); resourceWatcherService.notifyNow(); - CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), + CompiledScript compiledScript = scriptService.compile(new Script(ScriptType.FILE, "test", "test_script", Collections.emptyMap()), ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); @@ -178,7 +178,7 @@ public class ScriptServiceTests extends ESTestCase { resourceWatcherService.notifyNow(); try { - scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH, + scriptService.compile(new Script(ScriptType.FILE, "test", "test_script", 
Collections.emptyMap()), ScriptContext.Standard.SEARCH, Collections.emptyMap()); fail("the script test_script should no longer exist"); } catch (IllegalArgumentException ex) { @@ -196,7 +196,7 @@ public class ScriptServiceTests extends ESTestCase { Streams.copy("test_file_script".getBytes("UTF-8"), Files.newOutputStream(testFileScript)); resourceWatcherService.notifyNow(); - CompiledScript compiledScript = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), + CompiledScript compiledScript = scriptService.compile(new Script(ScriptType.FILE, "test", "file_script", Collections.emptyMap()), ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file_script")); @@ -207,9 +207,9 @@ public class ScriptServiceTests extends ESTestCase { public void testInlineScriptCompiledOnceCache() throws IOException { buildScriptService(Settings.EMPTY); - CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), + CompiledScript compiledScript1 = scriptService.compile(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts), Collections.emptyMap()); - CompiledScript compiledScript2 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), + CompiledScript compiledScript2 = scriptService.compile(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts), Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } @@ -332,8 +332,8 @@ public class ScriptServiceTests extends ESTestCase { String type = scriptEngineService.getType(); try { - scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin( - pluginName, unknownContext), Collections.emptyMap()); + scriptService.compile(new Script(randomFrom(ScriptType.values()), type, "test", 
Collections.emptyMap()), + new ScriptContext.Plugin(pluginName, unknownContext), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported")); @@ -342,21 +342,20 @@ public class ScriptServiceTests extends ESTestCase { public void testCompileCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); - scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), + scriptService.compile(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testExecutableCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); + scriptService.executable(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts)); assertEquals(1L, scriptService.stats().getCompilations()); } public void testSearchCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); - scriptService.search(null, new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), - Collections.emptyMap()); + scriptService.search(null, new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts)); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -365,7 +364,7 @@ public class ScriptServiceTests extends ESTestCase { int numberOfCompilations = randomIntBetween(1, 1024); for (int i = 0; i < numberOfCompilations; i++) { scriptService - .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), + .compile(new 
Script(ScriptType.INLINE, "test", i + " + " + i, Collections.emptyMap()), randomFrom(scriptContexts), Collections.emptyMap()); } assertEquals(numberOfCompilations, scriptService.stats().getCompilations()); @@ -376,22 +375,22 @@ public class ScriptServiceTests extends ESTestCase { builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); builder.put("script.inline", "true"); buildScriptService(builder.build()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); + scriptService.executable(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts)); + scriptService.executable(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts)); assertEquals(1L, scriptService.stats().getCompilations()); } public void testFileScriptCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); createFileScripts("test"); - scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), + scriptService.compile(new Script(ScriptType.FILE, "test", "file_script", Collections.emptyMap()), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testIndexedScriptCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); - scriptService.compile(new Script("script", ScriptType.STORED, "test", null), randomFrom(scriptContexts), + scriptService.compile(new Script(ScriptType.STORED, "test", "script", Collections.emptyMap()), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -401,8 +400,8 @@ public class ScriptServiceTests extends ESTestCase { 
builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); builder.put("script.inline", "true"); buildScriptService(builder.build()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); - scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); + scriptService.executable(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts)); + scriptService.executable(new Script(ScriptType.INLINE, "test", "2+2", Collections.emptyMap()), randomFrom(scriptContexts)); assertEquals(2L, scriptService.stats().getCompilations()); assertEquals(1L, scriptService.stats().getCacheEvictions()); } @@ -411,7 +410,8 @@ public class ScriptServiceTests extends ESTestCase { Settings.Builder builder = Settings.builder(); builder.put("script.inline", "true"); buildScriptService(builder.build()); - CompiledScript script = scriptService.compile(new Script("1 + 1", ScriptType.INLINE, null, null), + CompiledScript script = scriptService.compile( + new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "1 + 1", Collections.emptyMap()), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(script.lang(), Script.DEFAULT_SCRIPT_LANG); } @@ -493,7 +493,7 @@ public class ScriptServiceTests extends ESTestCase { private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) { try { - scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap()); + scriptService.compile(new Script(scriptType, lang, script, Collections.emptyMap()), scriptContext, Collections.emptyMap()); fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]"); } catch(IllegalStateException e) { //all good @@ -502,7 +502,7 @@ public class ScriptServiceTests 
extends ESTestCase { private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) { assertThat( - scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap()), + scriptService.compile(new Script(scriptType, lang, script, Collections.emptyMap()), scriptContext, Collections.emptyMap()), notNullValue() ); } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptTests.java b/core/src/test/java/org/elasticsearch/script/ScriptTests.java index c6424fdbdfe..c1c25a500b2 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptTests.java @@ -65,7 +65,7 @@ public class ScriptTests extends ESTestCase { } private Script createScript(XContent xContent) throws IOException { - final Map params = randomBoolean() ? null : Collections.singletonMap("key", "value"); + final Map params = randomBoolean() ? Collections.emptyMap() : Collections.singletonMap("key", "value"); ScriptType scriptType = randomFrom(ScriptType.values()); String script; if (scriptType == ScriptType.INLINE) { @@ -79,11 +79,12 @@ public class ScriptTests extends ESTestCase { script = randomAsciiOfLengthBetween(1, 5); } return new Script( - script, - scriptType, - randomFrom("_lang1", "_lang2", null), - params, - scriptType == ScriptType.INLINE ? xContent.type() : null + scriptType, + randomFrom("_lang1", "_lang2", "_lang3"), + script, + scriptType == ScriptType.INLINE ? 
+ Collections.singletonMap(Script.CONTENT_TYPE_OPTION, xContent.type().mediaType()) : Collections.emptyMap(), + params ); } diff --git a/core/src/test/java/org/elasticsearch/search/SearchCancellationIT.java b/core/src/test/java/org/elasticsearch/search/SearchCancellationIT.java index 53653392e6e..840cdc8af6c 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/core/src/test/java/org/elasticsearch/search/SearchCancellationIT.java @@ -147,7 +147,8 @@ public class SearchCancellationIT extends ESIntegTestCase { logger.info("Executing search"); ListenableActionFuture searchResponse = client().prepareSearch("test").setQuery( - scriptQuery(new Script(NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, ScriptType.INLINE, "native", null))) + scriptQuery(new Script( + ScriptType.INLINE, "native", NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, Collections.emptyMap()))) .execute(); awaitForBlock(plugins); @@ -165,7 +166,7 @@ public class SearchCancellationIT extends ESIntegTestCase { logger.info("Executing search"); ListenableActionFuture searchResponse = client().prepareSearch("test") .addScriptField("test_field", - new Script(NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, ScriptType.INLINE, "native", null) + new Script(ScriptType.INLINE, "native", NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, Collections.emptyMap()) ).execute(); awaitForBlock(plugins); @@ -175,7 +176,6 @@ public class SearchCancellationIT extends ESIntegTestCase { ensureSearchWasCancelled(searchResponse); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/21126") public void testCancellationOfScrollSearches() throws Exception { List plugins = initBlockFactory(); @@ -186,17 +186,22 @@ public class SearchCancellationIT extends ESIntegTestCase { .setScroll(TimeValue.timeValueSeconds(10)) .setSize(5) .setQuery( - scriptQuery(new Script(NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, ScriptType.INLINE, "native", 
null))) + scriptQuery(new Script( + ScriptType.INLINE, "native", NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, Collections.emptyMap()))) .execute(); awaitForBlock(plugins); cancelSearch(SearchAction.NAME); disableBlocks(plugins); - ensureSearchWasCancelled(searchResponse); + SearchResponse response = ensureSearchWasCancelled(searchResponse); + if (response != null) { + // The response might not have failed on all shards - we need to clean scroll + logger.info("Cleaning scroll with id {}", response.getScrollId()); + client().prepareClearScroll().addScrollId(response.getScrollId()).get(); + } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/21126") public void testCancellationOfScrollSearchesOnFollowupRequests() throws Exception { List plugins = initBlockFactory(); @@ -211,7 +216,8 @@ public class SearchCancellationIT extends ESIntegTestCase { .setScroll(keepAlive) .setSize(2) .setQuery( - scriptQuery(new Script(NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, ScriptType.INLINE, "native", null))) + scriptQuery(new Script( + ScriptType.INLINE, "native", NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, Collections.emptyMap()))) .get(); assertNotNull(searchResponse.getScrollId()); diff --git a/core/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/core/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index a50b7edf57a..48efc189ed2 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.TotalHitCountCollector; @@ 
-46,7 +47,9 @@ public class SearchCancellationTests extends ESTestCase { public static void before() throws IOException { dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir); - w.setDoRandomForceMerge(false); // we need 2 segments + // we need at least 2 segments - so no merges should be allowed + w.w.getConfig().setMergePolicy(NoMergePolicy.INSTANCE); + w.setDoRandomForceMerge(false); indexRandomDocuments(w, TestUtil.nextInt(random(), 2, 20)); w.flush(); indexRandomDocuments(w, TestUtil.nextInt(random(), 1, 20)); @@ -56,10 +59,6 @@ public class SearchCancellationTests extends ESTestCase { private static void indexRandomDocuments(RandomIndexWriter w, int numDocs) throws IOException { for (int i = 0; i < numDocs; ++i) { - final int numHoles = random().nextInt(5); - for (int j = 0; j < numHoles; ++j) { - w.addDocument(new Document()); - } Document doc = new Document(); doc.add(new StringField("foo", "bar", Field.Store.NO)); w.addDocument(doc); diff --git a/core/src/test/java/org/elasticsearch/search/SearchTimeoutIT.java b/core/src/test/java/org/elasticsearch/search/SearchTimeoutIT.java index be133c37b31..6a6838a9c4f 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchTimeoutIT.java +++ b/core/src/test/java/org/elasticsearch/search/SearchTimeoutIT.java @@ -58,7 +58,8 @@ public class SearchTimeoutIT extends ESIntegTestCase { client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS)) - .setQuery(scriptQuery(new Script(NativeTestScriptedTimeout.TEST_NATIVE_SCRIPT_TIMEOUT, ScriptType.INLINE, "native", null))) + .setQuery(scriptQuery( + new Script(ScriptType.INLINE, "native", NativeTestScriptedTimeout.TEST_NATIVE_SCRIPT_TIMEOUT, Collections.emptyMap()))) .execute().actionGet(); assertThat(searchResponse.isTimedOut(), equalTo(true)); } diff --git 
a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java index b9a921c7662..bc98dda41d6 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java @@ -44,7 +44,7 @@ public class AggregationTestScriptsPlugin extends MockScriptPlugin { // res[i] = values.get(i) - dec; // }; // return res; - public static final Script DECREMENT_ALL_VALUES = new Script("decrement all values", ScriptType.INLINE, NAME, singletonMap("dec", 1)); + public static final Script DECREMENT_ALL_VALUES = new Script(ScriptType.INLINE, NAME, "decrement all values", singletonMap("dec", 1)); @Override protected Map, Object>> pluginScripts() { @@ -91,7 +91,7 @@ public class AggregationTestScriptsPlugin extends MockScriptPlugin { return doc.get("values"); }); - scripts.put(DECREMENT_ALL_VALUES.getScript(), vars -> { + scripts.put(DECREMENT_ALL_VALUES.getIdOrCode(), vars -> { int dec = (int) vars.get("dec"); Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs values = (ScriptDocValues.Longs) doc.get("values"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index 4e1d8b6648b..00f82dfbe44 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -383,7 +383,7 @@ public class EquivalenceIT extends ESIntegTestCase { terms("terms") .field("values") .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("floor(_value / interval)", ScriptType.INLINE, CustomScriptPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / interval)", params)) 
.size(maxNumTerms)) .addAggregation( histogram("histo") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index bca3eb9c9dc..90342df179e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -496,7 +496,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) + .script(new Script(ScriptType.INLINE, "native", DateScriptMocks.PlusOneMonthScript.NAME, params)) .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -632,7 +632,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("dates") - .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) + .script(new Script(ScriptType.INLINE, "native", DateScriptMocks.PlusOneMonthScript.NAME, params)) .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -684,8 +684,8 @@ public class DateHistogramIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, - ScriptType.INLINE, "native", params)).dateHistogramInterval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script(ScriptType.INLINE, "native", DateScriptMocks.ExtractFieldScript.NAME, + 
params)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -722,8 +722,8 @@ public class DateHistogramIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, - ScriptType.INLINE, "native", params)).dateHistogramInterval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script(ScriptType.INLINE, "native", DateScriptMocks.ExtractFieldScript.NAME, + params)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -1214,7 +1214,7 @@ public class DateHistogramIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("fieldname", "d"); SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateHistogram("histo").field("d") - .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) + .script(new Script(ScriptType.INLINE, "native", DateScriptMocks.PlusOneMonthScript.NAME, params)) .dateHistogramInterval(DateHistogramInterval.MONTH)).get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index 5fe0880868a..71d97d3969e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -119,7 +119,7 @@ public class DateRangeIT extends ESIntegTestCase { if (randomBoolean()) { rangeBuilder.field("date"); } else { - rangeBuilder.script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)); + rangeBuilder.script(new Script(ScriptType.INLINE, "native", 
DateScriptMocks.ExtractFieldScript.NAME, params)); } SearchResponse response = client() .prepareSearch("idx") @@ -541,7 +541,7 @@ public class DateRangeIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") .field("dates") - .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) + .script(new Script(ScriptType.INLINE, "native", DateScriptMocks.PlusOneMonthScript.NAME, params)) .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).execute() .actionGet(); @@ -597,7 +597,7 @@ public class DateRangeIT extends ESIntegTestCase { params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") - .script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)) + .script(new Script(ScriptType.INLINE, "native", DateScriptMocks.ExtractFieldScript.NAME, params)) .addUnboundedTo(date(2, 15)) .addRange(date(2, 15), date(3, 15)) .addUnboundedFrom(date(3, 15))) @@ -659,7 +659,7 @@ public class DateRangeIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation( - dateRange("range").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)) + dateRange("range").script(new Script(ScriptType.INLINE, "native", DateScriptMocks.ExtractFieldScript.NAME, params)) .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)) .addUnboundedFrom(date(3, 15))).execute().actionGet(); @@ -889,7 +889,7 @@ public class DateRangeIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("fieldname", "date"); SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date") - .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) + .script(new Script(ScriptType.INLINE, "native", 
DateScriptMocks.PlusOneMonthScript.NAME, params)) .addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index e92485e6421..1dc9943e8a3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -452,7 +452,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -505,7 +505,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -534,7 +534,8 @@ public class DoubleTermsIT extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("(long) (_value / 1000 + 1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(long) (_value / 1000 + 1)", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -576,8 +577,8 @@ public class DoubleTermsIT extends 
AbstractTermsTestCase { .addAggregation( terms("terms") .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, - CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -603,8 +604,8 @@ public class DoubleTermsIT extends AbstractTermsTestCase { .addAggregation( terms("terms") .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']", ScriptType.INLINE, - CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", + Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -1078,10 +1079,11 @@ public class DoubleTermsIT extends AbstractTermsTestCase { } public void testScriptScore() { - Script scoringScript = - new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin .NAME, null); + Script scoringScript = new Script( + ScriptType.INLINE, CustomScriptPlugin .NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); - Script aggregationScript = new Script("ceil(_score.doubleValue()/3)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script aggregationScript = new Script( + ScriptType.INLINE, CustomScriptPlugin.NAME, "ceil(_score.doubleValue()/3)", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -1200,7 +1202,8 @@ public class DoubleTermsIT extends AbstractTermsTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - terms("terms").field("d").script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))).get(); + terms("terms").field("d").script( + new 
Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index 7e7815de54f..e25aeae29ff 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -562,7 +562,7 @@ public class HistogramIT extends ESIntegTestCase { .addAggregation( histogram("histo") .field(SINGLE_VALUED_FIELD_NAME) - .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) .interval(interval)) .execute().actionGet(); @@ -639,7 +639,7 @@ public class HistogramIT extends ESIntegTestCase { .addAggregation( histogram("histo") .field(MULTI_VALUED_FIELD_NAME) - .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) .interval(interval)) .execute().actionGet(); @@ -675,7 +675,7 @@ public class HistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation( histogram("histo") - .script(new Script("doc['l_value'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_value'].value", emptyMap())) .interval(interval)) .execute().actionGet(); @@ -699,7 +699,7 @@ public class HistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation( histogram("histo") - .script(new Script("doc['l_values']", 
ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_values']", emptyMap())) .interval(interval)) .execute().actionGet(); @@ -1016,7 +1016,7 @@ public class HistogramIT extends ESIntegTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(histogram("histo").field("d") - .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())).interval(0.7).offset(0.05)).get(); + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())).interval(0.7).offset(0.05)).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java index 7e7b11c8591..cc4818963ad 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java @@ -208,7 +208,7 @@ public class IpRangeIT extends ESIntegTestCase { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().prepareSearch("idx").addAggregation( AggregationBuilders.ipRange("my_range") - .script(new Script(DummyScript.NAME, ScriptType.INLINE, "native", Collections.emptyMap())) ).get()); + .script(new Script(ScriptType.INLINE, "native", DummyScript.NAME, Collections.emptyMap())) ).get()); assertThat(e.getMessage(), containsString("[ip_range] does not support scripts")); } @@ -217,7 +217,7 @@ public class IpRangeIT extends ESIntegTestCase { () -> client().prepareSearch("idx").addAggregation( AggregationBuilders.ipRange("my_range") .field("ip") - .script(new Script(DummyScript.NAME, ScriptType.INLINE, "native", 
Collections.emptyMap())) ).get()); + .script(new Script(ScriptType.INLINE, "native", DummyScript.NAME, Collections.emptyMap())) ).get()); assertThat(e.getMessage(), containsString("[ip_range] does not support scripts")); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index b97773311c6..1739d09a054 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -442,7 +442,7 @@ public class LongTermsIT extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -495,7 +495,7 @@ public class LongTermsIT extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value - 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value - 1", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -524,7 +524,8 @@ public class LongTermsIT extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("floor(_value / 1000 + 1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script( + ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / 1000 + 1)", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -560,7 +561,8 @@ public class LongTermsIT extends 
AbstractTermsTestCase { */ public void testScriptSingleValue() throws Exception { - Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") @@ -586,7 +588,8 @@ public class LongTermsIT extends AbstractTermsTestCase { } public void testScriptMultiValued() throws Exception { - Script script = new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") @@ -1157,7 +1160,8 @@ public class LongTermsIT extends AbstractTermsTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - terms("terms").field("d").script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))).get(); + terms("terms").field("d").script( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index 91885d5f5a6..925ff86232a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ 
-151,8 +151,8 @@ public class MinDocCountIT extends AbstractTermsTestCase { YES { @Override TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) { - return builder.script(new org.elasticsearch.script.Script("doc['" + field + "'].values", ScriptType.INLINE, - CustomScriptPlugin.NAME, null)); + return builder.script(new org.elasticsearch.script.Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "doc['" + field + "'].values", Collections.emptyMap())); } }; abstract TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index eb2a668bcbd..fc4fac72ee7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -394,7 +394,7 @@ public class RangeIT extends ESIntegTestCase { .addAggregation( range("range") .field(SINGLE_VALUED_FIELD_NAME) - .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) .addUnboundedTo(3) .addRange(3, 6) .addUnboundedFrom(6)) @@ -514,7 +514,7 @@ public class RangeIT extends ESIntegTestCase { .addAggregation( range("range") .field(MULTI_VALUED_FIELD_NAME) - .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) .addUnboundedTo(3) .addRange(3, 6) .addUnboundedFrom(6)) @@ -575,7 +575,8 @@ public class RangeIT extends ESIntegTestCase { */ public void testScriptSingleValue() throws Exception { - Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = + new 
Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -660,7 +661,8 @@ public class RangeIT extends ESIntegTestCase { } public void testScriptMultiValued() throws Exception { - Script script = new Script("doc['" + MULTI_VALUED_FIELD_NAME + "'].values", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "'].values", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -933,7 +935,8 @@ public class RangeIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("fieldname", "date"); SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - range("foo").field("i").script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null)).addRange(0, 10)) + range("foo").field("i").script( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())).addRange(0, 10)) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index d9bf07d4d3a..8852716377e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -56,6 +56,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -507,14 +508,13 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } 
private ScriptHeuristic getScriptSignificanceHeuristic() throws IOException { - Script script = null; + Script script; if (randomBoolean()) { - Map params = null; - params = new HashMap<>(); + Map params = new HashMap<>(); params.put("param", randomIntBetween(1, 100)); - script = new Script("native_significance_score_script_with_params", ScriptType.INLINE, "native", params); + script = new Script(ScriptType.INLINE, "native", "native_significance_score_script_with_params", params); } else { - script = new Script("native_significance_score_script_no_params", ScriptType.INLINE, "native", null); + script = new Script(ScriptType.INLINE, "native", "native_significance_score_script_no_params", Collections.emptyMap()); } return new ScriptHeuristic(script); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java index 30f9e3c97aa..3d5d13bf04a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java @@ -575,7 +575,7 @@ public class StringTermsIT extends AbstractTermsTestCase { .executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("'foo_' + _value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -602,7 +602,8 @@ public class StringTermsIT extends AbstractTermsTestCase { .executionHint(randomExecutionHint()) .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value.substring(0,3)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script( + ScriptType.INLINE, CustomScriptPlugin.NAME, 
"_value.substring(0,3)", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -652,7 +653,8 @@ public class StringTermsIT extends AbstractTermsTestCase { .addAggregation( terms("terms") .executionHint(randomExecutionHint()) - .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .script(new Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap())) .collectMode(randomFrom(SubAggCollectionMode.values()))) .get(); @@ -684,7 +686,7 @@ public class StringTermsIT extends AbstractTermsTestCase { .executionHint(randomExecutionHint()) .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("'foo_' + _value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -719,7 +721,8 @@ public class StringTermsIT extends AbstractTermsTestCase { */ public void testScriptSingleValue() throws Exception { - Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -747,7 +750,8 @@ public class StringTermsIT extends AbstractTermsTestCase { } public void testScriptSingleValueExplicitSingleValue() throws Exception { - Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -782,7 +786,8 @@ public class 
StringTermsIT extends AbstractTermsTestCase { terms("terms") .collectMode(randomFrom(SubAggCollectionMode.values())) .executionHint(randomExecutionHint()) - .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap()))) .get(); assertSearchResponse(response); @@ -1533,7 +1538,8 @@ public class StringTermsIT extends AbstractTermsTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) .addAggregation( - terms("terms").field("d").script(new Script("'foo_' + _value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + terms("terms").field("d").script( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap()))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java index a1ca5411292..66fd9653c4c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java @@ -167,7 +167,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(avg("avg").field("value") - .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))) + .script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", Collections.emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -184,7 +184,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(avg("avg").field("value") - .script(new 
Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))) + .script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", params))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -228,7 +228,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(avg("avg").field("values") - .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))) + .script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", Collections.emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -245,7 +245,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(avg("avg").field("values") - .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))) + .script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", params))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -261,7 +261,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(avg("avg") - .script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null))) + .script(new Script(ScriptType.INLINE, ExtractFieldScriptEngine.NAME, "value", Collections.emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -278,7 +278,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(avg("avg") - .script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params))) + .script(new Script(ScriptType.INLINE, ExtractFieldScriptEngine.NAME, "value", params))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -294,7 +294,7 @@ public class AvgIT 
extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(avg("avg") - .script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null))) + .script(new Script(ScriptType.INLINE, ExtractFieldScriptEngine.NAME, "values", Collections.emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -311,7 +311,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(avg("avg") - .script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params))) + .script(new Script(ScriptType.INLINE, ExtractFieldScriptEngine.NAME, "values", params))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -371,7 +371,8 @@ public class AvgIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(avg("foo").field("d").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))).get(); + .addAggregation(avg("foo").field("d").script( + new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", Collections.emptyMap()))).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java index 4c22a38f6f6..17785d2cb32 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java @@ -306,7 +306,7 @@ public class CardinalityIT extends ESIntegTestCase { .addAggregation( cardinality("cardinality") 
.precisionThreshold(precisionThreshold) - .script(new Script("doc['str_value'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_value'].value", emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -322,7 +322,7 @@ public class CardinalityIT extends ESIntegTestCase { .addAggregation( cardinality("cardinality") .precisionThreshold(precisionThreshold) - .script(new Script("doc['str_values'].values", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_values'].values", emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -334,7 +334,7 @@ public class CardinalityIT extends ESIntegTestCase { } public void testSingleValuedNumericScript() throws Exception { - Script script = new Script("doc[' + singleNumericField() + '].value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc[' + singleNumericField() + '].value", emptyMap()); SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) .execute().actionGet(); @@ -348,7 +348,8 @@ public class CardinalityIT extends ESIntegTestCase { } public void testMultiValuedNumericScript() throws Exception { - Script script = new Script("doc[' + multiNumericField(false) + '].values", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc[' + multiNumericField(false) + '].values", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) .execute().actionGet(); @@ -367,7 +368,7 @@ public class CardinalityIT extends 
ESIntegTestCase { cardinality("cardinality") .precisionThreshold(precisionThreshold) .field("str_value") - .script(new Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -384,7 +385,7 @@ public class CardinalityIT extends ESIntegTestCase { cardinality("cardinality") .precisionThreshold(precisionThreshold) .field("str_values") - .script(new Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -401,7 +402,7 @@ public class CardinalityIT extends ESIntegTestCase { cardinality("cardinality") .precisionThreshold(precisionThreshold) .field(singleNumericField()) - .script(new Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -418,7 +419,7 @@ public class CardinalityIT extends ESIntegTestCase { cardinality("cardinality") .precisionThreshold(precisionThreshold) .field(multiNumericField(false)) - .script(new Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -467,7 +468,7 @@ public class CardinalityIT extends ESIntegTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) .addAggregation( - cardinality("foo").field("d").script(new Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) + cardinality("foo").field("d").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) 
.get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index ce700d22da0..6eb0bba6ae4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -301,7 +301,8 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation( extendedStats("stats") .field("value") - .script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null)) + .script(new Script(ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) .sigma(sigma)) .execute().actionGet(); @@ -331,7 +332,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation( extendedStats("stats") .field("value") - .script(new Script("_value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params)) .sigma(sigma)) .execute().actionGet(); @@ -383,7 +384,8 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation( extendedStats("stats") .field("values") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null)) + .script(new Script(ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, "_value - 1", Collections.emptyMap())) .sigma(sigma)) .execute().actionGet(); @@ -413,7 +415,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation( extendedStats("stats") .field("values") - .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .sigma(sigma)) .get(); @@ -440,7 +442,8 @@ public class 
ExtendedStatsIT extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( extendedStats("stats") - .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null)) + .script(new Script(ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap())) .sigma(sigma)) .execute().actionGet(); @@ -465,7 +468,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("inc", 1); - Script script = new Script("doc['value'].value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params); double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") @@ -499,7 +502,8 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( extendedStats("stats") - .script(new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null)) + .script(new Script(ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, "doc['values'].values", Collections.emptyMap())) .sigma(sigma)) .execute().actionGet(); @@ -524,8 +528,8 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script("[ doc['value'].value, doc['value'].value - dec ]", ScriptType.INLINE, - AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "[ doc['value'].value, doc['value'].value - dec ]", + params); double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") @@ -653,7 +657,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached 
SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) .addAggregation(extendedStats("foo").field("d") - .script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index a77388fc03a..29184285195 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -260,7 +260,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) .field("value") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) .values(pcts)) .execute().actionGet(); @@ -284,7 +284,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) .field("value") - .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .values(pcts)) .execute().actionGet(); @@ -324,7 +324,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) .field("values") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 
1", emptyMap())) .values(pcts)) .execute().actionGet(); @@ -345,7 +345,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) .field("values") - .script(new Script("20 - _value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap())) .values(pcts)) .execute().actionGet(); @@ -369,7 +369,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) .field("values") - .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .values(pcts)) .execute().actionGet(); @@ -390,7 +390,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks") .method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) - .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap())) .values(pcts)) .execute().actionGet(); @@ -406,7 +406,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = client() @@ -431,7 +431,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { int sigDigits = randomSignificantDigits(); final double[] pcts = 
randomPercents(minValues, maxValues); - Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values'].values", emptyMap()); SearchResponse searchResponse = client() .prepareSearch("idx") @@ -553,7 +553,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client() .prepareSearch("cache_test_idx").setSize(0).addAggregation(percentileRanks("foo").method(PercentilesMethod.HDR).field("d") - .values(50.0).script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .values(50.0).script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 6e27e5b5042..32fdd02a876 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -245,7 +245,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) .field("value") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) .percentiles(pcts)) .execute().actionGet(); @@ -270,7 +270,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) .field("value") - .script(new Script("_value - dec", ScriptType.INLINE, 
AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .percentiles(pcts)) .execute().actionGet(); @@ -310,7 +310,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) .field("values") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) .percentiles(pcts)) .execute().actionGet(); @@ -331,7 +331,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) .field("values") - .script(new Script("20 - _value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap())) .percentiles(pcts)) .execute().actionGet(); @@ -356,7 +356,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) .field("values") - .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .percentiles(pcts)) .execute().actionGet(); @@ -377,7 +377,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { percentiles("percentiles") .numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) - .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap())) .percentiles(pcts)) .execute().actionGet(); @@ -392,7 +392,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { Map params 
= new HashMap<>(); params.put("dec", 1); - Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params); final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); @@ -418,7 +418,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values'].values", emptyMap()); SearchResponse searchResponse = client() .prepareSearch("idx") @@ -544,7 +544,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) .addAggregation(percentiles("foo").method(PercentilesMethod.HDR).field("d").percentiles(50.0) - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java index c692082aaaa..4a651a0ad55 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java @@ -165,7 +165,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation( max("max") .field("value") - .script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .script(new 
Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -185,7 +185,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation( max("max") .field("value") - .script(new Script("_value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))) .get(); assertHitCount(searchResponse, 10); @@ -218,7 +218,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation( max("max") .field("values") - .script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", emptyMap()))) .get(); assertHitCount(searchResponse, 10); @@ -238,7 +238,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation( max("max") .field("values") - .script(new Script("_value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))) .get(); assertHitCount(searchResponse, 10); @@ -255,7 +255,7 @@ public class MaxIT extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( max("max") - .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -271,7 +271,7 @@ public class MaxIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("inc", 1); - Script script = new Script("doc['value'].value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, 
"doc['value'].value + inc", params); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -292,7 +292,8 @@ public class MaxIT extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( max("max") - .script(new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null))) + .script(new Script(ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, "doc['values'].values", Collections.emptyMap()))) .get(); assertHitCount(searchResponse, 10); @@ -308,8 +309,8 @@ public class MaxIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("inc", 1); - Script script = new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, - AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "[ doc['value'].value, doc['value'].value + inc ]", + params); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(max("max").script(script)) @@ -372,7 +373,7 @@ public class MaxIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - max("foo").field("d").script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + max("foo").field("d").script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", emptyMap()))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java index dc6c7d0853d..dde1c840ee6 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java @@ -166,7 +166,7 @@ public class MinIT extends 
AbstractNumericTestCase { .addAggregation( min("min") .field("value") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) .get(); assertHitCount(searchResponse, 10); @@ -182,7 +182,7 @@ public class MinIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -219,7 +219,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation( min("min") .field("values") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) .get(); assertHitCount(searchResponse, 10); @@ -237,7 +237,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation( min("min") .field("values") - .script(new Script("_value * -1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap()))) .get(); assertHitCount(searchResponse, 10); @@ -253,7 +253,7 @@ public class MinIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").field("values").script(script)) @@ -269,7 +269,7 @@ 
public class MinIT extends AbstractNumericTestCase { @Override public void testScriptSingleValued() throws Exception { - Script script = new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").script(script)) @@ -288,7 +288,7 @@ public class MinIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").script(script)) @@ -304,7 +304,7 @@ public class MinIT extends AbstractNumericTestCase { @Override public void testScriptMultiValued() throws Exception { - Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values'].values", emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").script(script)) .get(); @@ -385,7 +385,7 @@ public class MinIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - min("foo").field("d").script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + min("foo").field("d").script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) .get(); assertSearchResponse(r); 
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 12f512efaaf..545c10bcb03 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -277,7 +277,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { } public void testMap() { - Script mapScript = new Script("_agg['count'] = 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg['count'] = 1", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -317,7 +317,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); - Script mapScript = new Script("_agg.add(1)", ScriptType.INLINE, CustomScriptPlugin.NAME, params); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(1)", params); SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -365,8 +365,10 @@ public class ScriptedMetricIT extends ESIntegTestCase { .addAggregation( scriptedMetric("scripted") .params(params) - .initScript(new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .mapScript(new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .initScript( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap())) + .mapScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "_agg.add(vars.multiplier)", Collections.emptyMap()))) .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -404,8 +406,9 @@ public class ScriptedMetricIT 
extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script mapScript = new Script("_agg.add(1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(1)", Collections.emptyMap()); + Script combineScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -455,9 +458,10 @@ public class ScriptedMetricIT extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script combineScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -508,10 +512,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script combineScript = new Script("sum agg 
values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script combineScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -551,10 +557,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script combineScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", 
Collections.emptyMap()); SearchResponse searchResponse = client() .prepareSearch("idx") @@ -605,9 +613,11 @@ public class ScriptedMetricIT extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script combineScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -645,9 +655,10 @@ public class ScriptedMetricIT extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); 
SearchResponse response = client() .prepareSearch("idx") @@ -684,8 +695,9 @@ public class ScriptedMetricIT extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -725,11 +737,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { Map reduceParams = new HashMap<>(); reduceParams.put("multiplier", 4); - Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script reduceScript = new Script("multiplied sum aggs of agg values as a new aggregation", ScriptType.INLINE, - CustomScriptPlugin.NAME, reduceParams); + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script combineScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "multiplied sum aggs of agg values as a new aggregation", reduceParams); 
SearchResponse response = client() .prepareSearch("idx") @@ -774,10 +787,14 @@ public class ScriptedMetricIT extends ESIntegTestCase { .addAggregation( scriptedMetric("scripted") .params(params) - .initScript(new Script("initScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null)) - .mapScript(new Script("mapScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null)) - .combineScript(new Script("combineScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null)) - .reduceScript(new Script("reduceScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null))) + .initScript( + new Script(ScriptType.STORED, CustomScriptPlugin.NAME, "initScript_stored", Collections.emptyMap())) + .mapScript( + new Script(ScriptType.STORED, CustomScriptPlugin.NAME, "mapScript_stored", Collections.emptyMap())) + .combineScript( + new Script(ScriptType.STORED, CustomScriptPlugin.NAME, "combineScript_stored", Collections.emptyMap())) + .reduceScript( + new Script(ScriptType.STORED, CustomScriptPlugin.NAME, "reduceScript_stored", Collections.emptyMap()))) .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -810,10 +827,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { .addAggregation( scriptedMetric("scripted") .params(params) - .initScript(new Script("init_script", ScriptType.FILE, CustomScriptPlugin.NAME, null)) - .mapScript(new Script("map_script", ScriptType.FILE, CustomScriptPlugin.NAME, null)) - .combineScript(new Script("combine_script", ScriptType.FILE, CustomScriptPlugin.NAME, null)) - .reduceScript(new Script("reduce_script", ScriptType.FILE, CustomScriptPlugin.NAME, null))) + .initScript(new Script(ScriptType.FILE, CustomScriptPlugin.NAME, "init_script", Collections.emptyMap())) + .mapScript(new Script(ScriptType.FILE, CustomScriptPlugin.NAME, "map_script", Collections.emptyMap())) + .combineScript( + new Script(ScriptType.FILE, CustomScriptPlugin.NAME, "combine_script", 
Collections.emptyMap())) + .reduceScript( + new Script(ScriptType.FILE, CustomScriptPlugin.NAME, "reduce_script", Collections.emptyMap()))) .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -841,10 +860,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script combineScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -900,10 +921,12 @@ public class ScriptedMetricIT extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); - Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script reduceScript = new 
Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg.add(vars.multiplier)", Collections.emptyMap()); + Script combineScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum agg values as a new aggregation", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum aggs of agg values as a new aggregation", Collections.emptyMap()); SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) @@ -939,7 +962,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { * not using a script does get cached. */ public void testDontCacheScripts() throws Exception { - Script mapScript = new Script("_agg['count'] = 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_agg['count'] = 1", Collections.emptyMap()); assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java index e75d14bc87a..11c6ed7f6a2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import 
org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -54,7 +55,8 @@ public class ScriptedMetricTests extends BaseAggregationTestCase params = new HashMap<>(); params.put("inc", 1); - Script script = new Script("doc['value'].value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -394,7 +394,7 @@ public class StatsIT extends AbstractNumericTestCase { @Override public void testScriptMultiValued() throws Exception { - Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values'].values", emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -420,8 +420,8 @@ public class StatsIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script("[ doc['value'].value, doc['value'].value - dec ]", ScriptType.INLINE, - AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "[ doc['value'].value, doc['value'].value - dec ]", + params); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -508,7 +508,8 @@ public class StatsIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - stats("foo").field("d").script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null))).get(); + stats("foo").field("d").script( + new 
Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index b2609c76543..61c80d648a6 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -44,6 +44,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -175,7 +176,8 @@ public class SumIT extends AbstractNumericTestCase { public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(sum("sum").field("value").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))) + .addAggregation(sum("sum").field("value").script( + new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", Collections.emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -192,7 +194,7 @@ public class SumIT extends AbstractNumericTestCase { params.put("increment", 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(sum("sum").field("value").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))) + .addAggregation(sum("sum").field("value").script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", params))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -207,7 +209,8 @@ public class SumIT extends AbstractNumericTestCase { public void 
testScriptSingleValued() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(sum("sum").script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null))) + .addAggregation(sum("sum").script( + new Script(ScriptType.INLINE, ExtractFieldScriptEngine.NAME, "value", Collections.emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -224,7 +227,7 @@ public class SumIT extends AbstractNumericTestCase { params.put("inc", 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(sum("sum").script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params))) + .addAggregation(sum("sum").script(new Script(ScriptType.INLINE, ExtractFieldScriptEngine.NAME, "value", params))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -239,7 +242,8 @@ public class SumIT extends AbstractNumericTestCase { public void testScriptMultiValued() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(sum("sum").script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null))) + .addAggregation(sum("sum").script( + new Script(ScriptType.INLINE, ExtractFieldScriptEngine.NAME, "values", Collections.emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -257,7 +261,7 @@ public class SumIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - sum("sum").script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params))) + sum("sum").script(new Script(ScriptType.INLINE, ExtractFieldScriptEngine.NAME, "values", params))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -289,7 +293,8 @@ public class SumIT extends AbstractNumericTestCase { SearchResponse searchResponse = 
client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(sum("sum").field("values").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))) + .addAggregation(sum("sum").field("values").script( + new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", Collections.emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -305,7 +310,7 @@ public class SumIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("increment", 1); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(sum("sum").field("values").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))) + .addAggregation(sum("sum").field("values").script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", params))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -365,7 +370,8 @@ public class SumIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(sum("foo").field("d").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))).get(); + .addAggregation(sum("foo").field("d").script( + new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", Collections.emptyMap()))).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 3e482229503..67ac4855026 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -241,7 +241,7 
@@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { randomCompression( percentileRanks("percentile_ranks")) .field("value") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) .values(pcts)) .execute().actionGet(); @@ -262,7 +262,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { randomCompression( percentileRanks("percentile_ranks")) .field("value") - .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .values(pcts)) .execute().actionGet(); @@ -297,7 +297,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { randomCompression( percentileRanks("percentile_ranks")) .field("values") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) .values(pcts)) .execute().actionGet(); @@ -315,7 +315,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { randomCompression( percentileRanks("percentile_ranks")) .field("values") - .script(new Script("_value * -1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap())) .values(pcts)) .execute().actionGet(); @@ -336,7 +336,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { randomCompression( percentileRanks("percentile_ranks")) .field("values") - .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .values(pcts)) 
.execute().actionGet(); @@ -354,7 +354,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { .addAggregation( randomCompression( percentileRanks("percentile_ranks")) - .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap())) .values(pcts)) .execute().actionGet(); @@ -369,7 +369,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = client().prepareSearch("idx") @@ -390,7 +390,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { @Override public void testScriptMultiValued() throws Exception { final double[] pcts = randomPercents(minValues, maxValues); - Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values'].values", emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( @@ -502,7 +502,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(percentileRanks("foo").field("d").values(50.0) - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))).get(); + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value 
- 1", emptyMap()))).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index 71f3692a25b..dbc7993c512 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -226,7 +226,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { randomCompression( percentiles("percentiles")) .field("value") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) .percentiles(pcts)) .execute().actionGet(); @@ -247,7 +247,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { randomCompression( percentiles("percentiles")) .field("value") - .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .percentiles(pcts)) .execute().actionGet(); @@ -280,7 +280,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { randomCompression( percentiles("percentiles")) .field("values") - .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) .percentiles(pcts)) .execute().actionGet(); @@ -298,7 +298,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { randomCompression( percentiles("percentiles")) .field("values") - .script(new Script("_value * -1", 
ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap())) .percentiles(pcts)) .execute().actionGet(); @@ -319,7 +319,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { randomCompression( percentiles("percentiles")) .field("values") - .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) .percentiles(pcts)) .execute().actionGet(); @@ -331,7 +331,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { @Override public void testScriptSingleValued() throws Exception { - Script script = new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()); final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -353,7 +353,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params); final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") @@ -374,7 +374,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { @Override public void testScriptMultiValued() throws Exception { final double[] pcts = randomPercentiles(); - Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + Script script = new Script(ScriptType.INLINE, 
AggregationTestScriptsPlugin.NAME, "doc['values'].values", emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) @@ -488,7 +488,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(percentiles("foo").field("d") - .percentiles(50.0).script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .percentiles(50.0).script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 3cdcbf3c1c1..4bc640dc900 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -583,7 +583,7 @@ public class TopHitsIT extends ESIntegTestCase { .explain(true) .storedField("text") .fieldDataField("field1") - .scriptField("script", new Script("5", ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())) + .scriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) .fetchSource("text", null) .version(true) ) @@ -865,7 +865,7 @@ public class TopHitsIT extends ESIntegTestCase { nested("to-comments", "comments").subAggregation( topHits("top-comments").size(1).highlighter(new HighlightBuilder().field(hlField)).explain(true) .fieldDataField("comments.user") - .scriptField("script", new Script("5", ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())).fetchSource("comments.message", null) + .scriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", 
Collections.emptyMap())).fetchSource("comments.message", null) .version(true).sort("comments.date", SortOrder.ASC))).get(); assertHitCount(searchResponse, 2); Nested nested = searchResponse.getAggregations().get("to-comments"); @@ -1014,7 +1014,8 @@ public class TopHitsIT extends ESIntegTestCase { // Test that a request using a script field does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(topHits("foo").scriptField("bar", new Script("5", ScriptType.INLINE, CustomScriptPlugin.NAME, null))).get(); + .addAggregation(topHits("foo").scriptField("bar", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "5", Collections.emptyMap()))).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() @@ -1025,7 +1026,8 @@ public class TopHitsIT extends ESIntegTestCase { // Test that a request using a script sort does not get cached r = client().prepareSearch("cache_test_idx").setSize(0) .addAggregation(topHits("foo").sort( - SortBuilders.scriptSort(new Script("5", ScriptType.INLINE, CustomScriptPlugin.NAME, null), ScriptSortType.STRING))) + SortBuilders.scriptSort( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "5", Collections.emptyMap()), ScriptSortType.STRING))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 4d421892b6b..72a546885c9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -157,7 +157,7 @@ public class ValueCountIT extends ESIntegTestCase { public void testSingleValuedScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - 
.addAggregation(count("count").script(new Script("value", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))).execute().actionGet(); + .addAggregation(count("count").script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "value", Collections.emptyMap()))).execute().actionGet(); assertHitCount(searchResponse, 10); @@ -169,7 +169,7 @@ public class ValueCountIT extends ESIntegTestCase { public void testMultiValuedScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(count("count").script(new Script("values", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))).execute().actionGet(); + .addAggregation(count("count").script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "values", Collections.emptyMap()))).execute().actionGet(); assertHitCount(searchResponse, 10); @@ -182,7 +182,7 @@ public class ValueCountIT extends ESIntegTestCase { public void testSingleValuedScriptWithParams() throws Exception { Map params = Collections.singletonMap("s", "value"); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(count("count").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))).execute().actionGet(); + .addAggregation(count("count").script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", params))).execute().actionGet(); assertHitCount(searchResponse, 10); @@ -195,7 +195,7 @@ public class ValueCountIT extends ESIntegTestCase { public void testMultiValuedScriptWithParams() throws Exception { Map params = Collections.singletonMap("s", "values"); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(count("count").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))).execute().actionGet(); + .addAggregation(count("count").script(new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "", 
params))).execute().actionGet(); assertHitCount(searchResponse, 10); @@ -224,7 +224,8 @@ public class ValueCountIT extends ESIntegTestCase { // Test that a request using a script does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(count("foo").field("d").script(new Script("value", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))) + .addAggregation(count("foo").field("d").script( + new Script(ScriptType.INLINE, FieldValueScriptEngine.NAME, "value", Collections.emptyMap()))) .get(); assertSearchResponse(r); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java index 47846eaabae..e76b02a8c95 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java @@ -164,8 +164,9 @@ public class BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( bucketScript("seriesArithmetic", - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null) - , "field2Sum", "field3Sum", "field4Sum"))) + new Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", "field3Sum", "field4Sum"))) .execute().actionGet(); assertSearchResponse(response); @@ -210,7 +211,8 @@ public class BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( bucketScript("seriesArithmetic", - new Script("_value0 + _value1 / _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null), + new Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "_value0 + _value1 / _value2", Collections.emptyMap()), "field2Sum", "field3Sum", "field4Sum"))) .execute().actionGet(); @@ -256,7 +258,7 @@ public class 
BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( bucketScript("seriesArithmetic", - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null) + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()) , "field2Sum", "field3Sum", "field4Sum"))) .execute().actionGet(); @@ -300,7 +302,7 @@ public class BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation( bucketScript("seriesArithmetic", - new Script("_value0", ScriptType.INLINE, CustomScriptPlugin.NAME, null), + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0", Collections.emptyMap()), "field2Sum"))) .execute().actionGet(); @@ -344,7 +346,8 @@ public class BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( bucketScript("seriesArithmetic", bucketsPathsMap, - new Script("foo + bar + baz", ScriptType.INLINE, CustomScriptPlugin.NAME, null)))) + new Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "foo + bar + baz", Collections.emptyMap())))) .execute().actionGet(); assertSearchResponse(response); @@ -381,7 +384,7 @@ public class BucketScriptIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("factor", 3); - Script script = new Script("(_value0 + _value1 + _value2) * factor", ScriptType.INLINE, CustomScriptPlugin.NAME, params); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(_value0 + _value1 + _value2) * factor", params); SearchResponse response = client() .prepareSearch("idx") @@ -437,7 +440,8 @@ public class BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( bucketScript("seriesArithmetic", - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null), + new Script(ScriptType.INLINE, + 
CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), "field2Sum", "field3Sum", "field4Sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); @@ -491,7 +495,7 @@ public class BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( bucketScript("seriesArithmetic", - new Script("my_script", ScriptType.STORED, CustomScriptPlugin.NAME, null), + new Script(ScriptType.STORED, CustomScriptPlugin.NAME, "my_script", Collections.emptyMap()), "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -536,7 +540,8 @@ public class BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( bucketScript("seriesArithmetic", - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null), + new Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), "field2Sum", "field3Sum", "field4Sum"))) .execute().actionGet(); @@ -560,7 +565,8 @@ public class BucketScriptIT extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( bucketScript("seriesArithmetic", - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null), + new Script(ScriptType.INLINE, + CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java index 4b79977cf54..d485a0b6d87 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java @@ -42,12 
+42,11 @@ public class BucketScriptTests extends BasePipelineAggregationTestCase params = null; + Map params = new HashMap<>(); if (randomBoolean()) { - params = new HashMap(); params.put("foo", "bar"); } - script = new Script("script", randomFrom(ScriptType.values()), randomFrom("my_lang", null), params); + script = new Script(randomFrom(ScriptType.values()), randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), "script", params); } BucketScriptPipelineAggregationBuilder factory = new BucketScriptPipelineAggregationBuilder(name, bucketsPaths, script); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java index e300a72bc37..67261ee02a4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java @@ -176,8 +176,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testInlineScript() { - Script script = - new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(interval) @@ -205,7 +205,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testInlineScriptNoBucketsPruned() { - Script script = new Script("Double.isNaN(_value0) ? true : (_value0 < 10000)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? 
true : (_value0 < 10000)", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -238,7 +239,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testInlineScriptNoBucketsLeft() { - Script script = new Script("Double.isNaN(_value0) ? false : (_value0 > 10000)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 > 10000)", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -261,7 +263,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testInlineScript2() { - Script script = new Script("Double.isNaN(_value0) ? false : (_value0 < _value1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 < _value1)", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -294,7 +297,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testInlineScriptSingleVariable() { - Script script = new Script("Double.isNaN(_value0) ? false : (_value0 > 100)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 > 100)", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -323,8 +327,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testInlineScriptNamedVars() { - Script script = new Script("Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)", ScriptType.INLINE, - CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(my_value1) ? 
false : (my_value1 + my_value2 > 100)", Collections.emptyMap()); Map bucketPathsMap = new HashMap<>(); bucketPathsMap.put("my_value1", "field2Sum"); @@ -360,8 +364,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testInlineScriptWithParams() { - Script script = new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > threshold)", ScriptType.INLINE, - CustomScriptPlugin.NAME, Collections.singletonMap("threshold", 100)); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 + _value1 > threshold)", Collections.singletonMap("threshold", 100)); SearchResponse response = client().prepareSearch("idx") .addAggregation( @@ -393,7 +397,7 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testInlineScriptInsertZeros() { - Script script = new Script("_value0 + _value1 > 100", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 > 100", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx") .addAggregation( @@ -432,7 +436,7 @@ public class BucketSelectorIT extends ESIntegTestCase { // Source is not interpreted but my_script is defined in CustomScriptPlugin .setSource(new BytesArray("{ \"script\": \"Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)\" }"))); - Script script = new Script("my_script", ScriptType.STORED, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.STORED, CustomScriptPlugin.NAME, "my_script", Collections.emptyMap()); SearchResponse response = client() .prepareSearch("idx") @@ -465,8 +469,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testUnmapped() throws Exception { - Script script = new Script("Double.isNaN(_value0) ? 
false : (_value0 + _value1 > 100)", ScriptType.INLINE, - CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx_unmapped") .addAggregation( @@ -487,8 +491,8 @@ public class BucketSelectorIT extends ESIntegTestCase { } public void testPartiallyUnmapped() throws Exception { - Script script = new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, - CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx", "idx_unmapped") .addAggregation( diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java index 8cedbab8e83..563894906ed 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java @@ -42,12 +42,11 @@ public class BucketSelectorTests extends BasePipelineAggregationTestCase params = null; + Map params = new HashMap<>(); if (randomBoolean()) { - params = new HashMap(); params.put("foo", "bar"); } - script = new Script("script", randomFrom(ScriptType.values()), randomFrom("my_lang", null), params); + script = new Script(randomFrom(ScriptType.values()), randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), "script", params); } BucketSelectorPipelineAggregationBuilder factory = new BucketSelectorPipelineAggregationBuilder(name, bucketsPaths, script); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java 
b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index dc2898424fd..e406cb72aea 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -44,10 +44,13 @@ import org.elasticsearch.test.engine.MockEngineSupport; import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Random; +import java.util.Set; import java.util.concurrent.ExecutionException; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -56,7 +59,14 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class, MockEngineFactoryPlugin.class); + return Arrays.asList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class); + } + + @Override + protected Collection> getMockPlugins() { + Set> mocks = new HashSet<>(super.getMockPlugins()); + mocks.remove(MockEngineFactoryPlugin.class); + return mocks; } public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException { @@ -153,17 +163,22 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper { - public static class TestPlugin extends Plugin { + public static class TestPlugin extends MockEngineFactoryPlugin { public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope); public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, 
Property.IndexScope); @Override public List> getSettings() { - return Arrays.asList(EXCEPTION_TOP_LEVEL_RATIO_SETTING, EXCEPTION_LOW_LEVEL_RATIO_SETTING); + List> settings = new ArrayList<>(); + settings.addAll(super.getSettings()); + settings.add(EXCEPTION_TOP_LEVEL_RATIO_SETTING); + settings.add(EXCEPTION_LOW_LEVEL_RATIO_SETTING); + return settings; } - public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) { - module.setReaderClass(RandomExceptionDirectoryReaderWrapper.class); + @Override + protected Class getReaderWrapperClass() { + return RandomExceptionDirectoryReaderWrapper.class; } } diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 696af4f94eb..9f03724d3c6 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -43,6 +43,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; +import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.TreeSet; @@ -450,7 +451,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { MultiSearchResponse response = client().prepareMultiSearch() // Add custom score query with bogus script - .add(client().prepareSearch("test").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("nid", 1), new ScriptScoreFunctionBuilder(new Script("foo", ScriptType.INLINE, "bar", null))))) + .add(client().prepareSearch("test").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("nid", 1), new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, "bar", "foo", Collections.emptyMap()))))) .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) 
.add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index 519ea77f128..809591cd688 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -167,7 +167,7 @@ public class InnerHitsIT extends ESIntegTestCase { .setExplain(true) .addDocValueField("comments.message") .addScriptField("script", - new Script("5", ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())) + new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) .setSize(1) )).get(); assertNoFailures(response); @@ -301,8 +301,8 @@ public class InnerHitsIT extends ESIntegTestCase { .addDocValueField("message") .setHighlightBuilder(new HighlightBuilder().field("message")) .setExplain(true).setSize(1) - .addScriptField("script", new Script("5", ScriptType.INLINE, - MockScriptEngine.NAME, Collections.emptyMap())) + .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", + Collections.emptyMap())) ) ).get(); assertNoFailures(response); @@ -995,4 +995,21 @@ public class InnerHitsIT extends ESIntegTestCase { equalTo("fox ate rabbit x y z")); } + public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception { + assertAcked(prepareCreate("test").addMapping("child_type", "_parent", "type=parent_type", "nested_type", "type=nested")); + client().prepareIndex("test", "parent_type", "1").setSource("key", "value").get(); + client().prepareIndex("test", "child_type", "2").setParent("1").setSource("nested_type", Collections.singletonMap("key", "value")) + .get(); + refresh(); + SearchResponse response = client().prepareSearch("test") + .setQuery(boolQuery().must(matchQuery("key", "value")) + 
.should(hasChildQuery("child_type", nestedQuery("nested_type", matchAllQuery(), ScoreMode.None) + .innerHit(new InnerHitBuilder()), ScoreMode.None).innerHit(new InnerHitBuilder()))) + .get(); + assertHitCount(response, 1); + SearchHit hit = response.getHits().getAt(0); + assertThat(hit.getInnerHits().get("child_type").getAt(0).field("_parent").getValue(), equalTo("1")); + assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue()); + } + } diff --git a/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 9e3cc14eed0..da844b1969e 100644 --- a/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -285,15 +285,18 @@ public class SearchFieldsIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch() .setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .addScriptField("sNum1_field", new Script("_fields['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .addScriptField("date1", new Script("doc['date'].date.millis", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("sNum1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) + .addScriptField("sNum1_field", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields['num1'].value", Collections.emptyMap())) + .addScriptField("date1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap())) .execute().actionGet(); assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(3L)); - assertThat(response.getHits().getAt(0).hasSource(), equalTo(true)); + 
assertFalse(response.getHits().getAt(0).hasSource()); assertThat(response.getHits().getAt(0).id(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates @@ -321,7 +324,7 @@ public class SearchFieldsIT extends ESIntegTestCase { response = client().prepareSearch() .setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script("doc['num1'].value * factor", ScriptType.INLINE, CustomScriptPlugin.NAME, params)) + .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value * factor", params)) .get(); assertThat(response.getHits().totalHits(), equalTo(3L)); @@ -357,7 +360,7 @@ public class SearchFieldsIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) .setSize(numDocs) - .addScriptField("uid", new Script("_fields._uid.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("uid", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._uid.value", Collections.emptyMap())) .get(); assertNoFailures(response); @@ -375,7 +378,7 @@ public class SearchFieldsIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) .setSize(numDocs) - .addScriptField("id", new Script("_fields._id.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())) .get(); assertNoFailures(response); @@ -393,7 +396,8 @@ public class SearchFieldsIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) .setSize(numDocs) - .addScriptField("type", new Script("_fields._type.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("type", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap())) .get(); 
assertNoFailures(response); @@ -411,9 +415,10 @@ public class SearchFieldsIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) .setSize(numDocs) - .addScriptField("id", new Script("_fields._id.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .addScriptField("uid", new Script("_fields._uid.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .addScriptField("type", new Script("_fields._type.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())) + .addScriptField("uid", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._uid.value", Collections.emptyMap())) + .addScriptField("type", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap())) .get(); assertNoFailures(response); @@ -444,11 +449,13 @@ public class SearchFieldsIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch() .setQuery(matchAllQuery()) - .addScriptField("s_obj1", new Script("_source.obj1", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .addScriptField("s_obj1_test", new Script("_source.obj1.test", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .addScriptField("s_obj2", new Script("_source.obj2", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .addScriptField("s_obj2_arr2", new Script("_source.obj2.arr2", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) - .addScriptField("s_arr3", new Script("_source.arr3", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("s_obj1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj1", Collections.emptyMap())) + .addScriptField("s_obj1_test", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj1.test", Collections.emptyMap())) + .addScriptField("s_obj2", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj2", 
Collections.emptyMap())) + .addScriptField("s_obj2_arr2", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj2.arr2", Collections.emptyMap())) + .addScriptField("s_arr3", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.arr3", Collections.emptyMap())) .get(); assertThat("Failures " + Arrays.toString(response.getShardFailures()), response.getShardFailures().length, equalTo(0)); @@ -481,7 +488,8 @@ public class SearchFieldsIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch() .setQuery(matchAllQuery()) - .addScriptField("test_script_1", new Script("return null", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("test_script_1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return null", Collections.emptyMap())) .get(); assertNoFailures(response); @@ -847,7 +855,8 @@ public class SearchFieldsIT extends ESIntegTestCase { ensureSearchable(); SearchRequestBuilder req = client().prepareSearch("index"); for (String field : Arrays.asList("s", "ms", "l", "ml", "d", "md")) { - req.addScriptField(field, new Script("doc['" + field + "'].values", ScriptType.INLINE, CustomScriptPlugin.NAME, null)); + req.addScriptField(field, + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + field + "'].values", Collections.emptyMap())); } SearchResponse resp = req.get(); assertSearchResponse(resp); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index dacd11c843f..9b732bdc00d 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -44,6 +44,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; 
import java.util.concurrent.ExecutionException; @@ -76,7 +77,8 @@ public class ExplainableScriptIT extends ESIntegTestCase { SearchResponse response = client().search(searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().explain(true).query( functionScoreQuery(termQuery("text", "text"), - scriptFunction(new Script("native_explainable_script", ScriptType.INLINE, "native", null))) + scriptFunction( + new Script(ScriptType.INLINE, "native", "native_explainable_script", Collections.emptyMap()))) .boostMode(CombineFunction.REPLACE)))).actionGet(); ElasticsearchAssertions.assertNoFailures(response); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 2bd6de9e647..e9cb9d72a8b 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -94,8 +94,8 @@ public class FunctionScoreIT extends ESIntegTestCase { index(INDEX, TYPE, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject()); refresh(); - Script scriptOne = new Script("1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); - Script scriptTwo = new Script("get score value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script scriptOne = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "1", Collections.emptyMap()); + Script scriptTwo = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get score value", Collections.emptyMap()); SearchResponse response = client().search( searchRequest().source( @@ -118,7 +118,7 @@ public class FunctionScoreIT extends ESIntegTestCase { index(INDEX, TYPE, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject()); refresh(); - Script script = new Script("get score value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, 
CustomScriptPlugin.NAME, "get score value", Collections.emptyMap()); SearchResponse response = client().search( searchRequest().source( @@ -146,7 +146,7 @@ public class FunctionScoreIT extends ESIntegTestCase { refresh(); ensureYellow(); - Script script = new Script("doc['random_score']", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['random_score']", Collections.emptyMap()); SearchResponse searchResponse = client().search( searchRequest().source(searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore))) ).actionGet(); @@ -178,7 +178,7 @@ public class FunctionScoreIT extends ESIntegTestCase { docs.add(client().prepareIndex(INDEX, TYPE, Integer.toString(i)).setSource("num", i + scoreOffset)); } indexRandom(true, docs); - Script script = new Script("return (doc['num'].value)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return (doc['num'].value)", Collections.emptyMap()); int numMatchingDocs = numDocs + scoreOffset - minScore; if (numMatchingDocs < 0) { numMatchingDocs = 0; diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index cd283605503..a6f7eb760d2 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -169,7 +169,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { params.put("factor", randomIntBetween(2, 4)); // Test for accessing _score - Script script = new Script("log(doc['index'].value + (factor * _score))", ScriptType.INLINE, NAME, params); + Script script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score))", params); SearchResponse resp = client() 
.prepareSearch("test") .setQuery( @@ -185,7 +185,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.intValue() - script = new Script("log(doc['index'].value + (factor * _score.intValue()))", ScriptType.INLINE, NAME, params); + script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.intValue()))", params); resp = client() .prepareSearch("test") .setQuery( @@ -201,7 +201,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.longValue() - script = new Script("log(doc['index'].value + (factor * _score.longValue()))", ScriptType.INLINE, NAME, params); + script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.longValue()))", params); resp = client() .prepareSearch("test") .setQuery( @@ -217,7 +217,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.floatValue() - script = new Script("log(doc['index'].value + (factor * _score.floatValue()))", ScriptType.INLINE, NAME, params); + script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.floatValue()))", params); resp = client() .prepareSearch("test") .setQuery( @@ -233,7 +233,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.doubleValue() - script = new Script("log(doc['index'].value + (factor * _score.doubleValue()))", ScriptType.INLINE, NAME, params); + script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.doubleValue()))", params); resp = client() .prepareSearch("test") .setQuery( diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java 
index 841b6bcd987..6c9acd7e8a7 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -48,6 +48,7 @@ import org.junit.Before; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -127,7 +128,7 @@ public class GeoDistanceIT extends ESIntegTestCase { // Test doc['location'].arcDistance(lat, lon) SearchResponse searchResponse1 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("arcDistance", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "arcDistance", Collections.emptyMap())) .get(); Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance1, @@ -135,16 +136,16 @@ public class GeoDistanceIT extends ESIntegTestCase { // Test doc['location'].planeDistance(lat, lon) SearchResponse searchResponse2 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("planeDistance", ScriptType.INLINE, - CustomScriptPlugin.NAME, null)).get(); + .addScriptField("distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "planeDistance", + Collections.emptyMap())).get(); Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance2, closeTo(GeoUtils.planeDistance(src_lat, src_lon, tgt_lat, tgt_lon), 0.01d)); // Test doc['location'].geohashDistance(lat, lon) SearchResponse searchResponse4 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("geohashDistance", ScriptType.INLINE, - CustomScriptPlugin.NAME, null)).get(); + .addScriptField("distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, 
"geohashDistance", + Collections.emptyMap())).get(); Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance4, closeTo(GeoUtils.arcDistance(src_lat, src_lon, GeoHashUtils.decodeLatitude(tgt_geohash), @@ -152,16 +153,16 @@ public class GeoDistanceIT extends ESIntegTestCase { // Test doc['location'].arcDistance(lat, lon + 360)/1000d SearchResponse searchResponse5 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("arcDistance(lat, lon + 360)/1000d", ScriptType.INLINE, - CustomScriptPlugin.NAME, null)).get(); + .addScriptField("distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "arcDistance(lat, lon + 360)/1000d", + Collections.emptyMap())).get(); Double resultArcDistance5 = searchResponse5.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance5, closeTo(GeoUtils.arcDistance(src_lat, src_lon, tgt_lat, tgt_lon)/1000d, 0.01d)); // Test doc['location'].arcDistance(lat + 360, lon)/1000d SearchResponse searchResponse6 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("arcDistance(lat + 360, lon)/1000d", ScriptType.INLINE, - CustomScriptPlugin.NAME, null)).get(); + .addScriptField("distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "arcDistance(lat + 360, lon)/1000d", + Collections.emptyMap())).get(); Double resultArcDistance6 = searchResponse6.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance6, closeTo(GeoUtils.arcDistance(src_lat, src_lon, tgt_lat, tgt_lon)/1000d, 0.01d)); diff --git a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java index 20216e10593..2cb425d5274 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.internal; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.text.Text; @@ -26,7 +27,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; -import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.HashMap; import java.util.Map; @@ -76,4 +76,22 @@ public class InternalSearchHitTests extends ESTestCase { assertThat(results.getAt(1).shard(), equalTo(target)); } + public void testNullSource() throws Exception { + InternalSearchHit searchHit = new InternalSearchHit(0, "_id", new Text("_type"), null); + + assertThat(searchHit.source(), nullValue()); + assertThat(searchHit.sourceRef(), nullValue()); + assertThat(searchHit.sourceAsMap(), nullValue()); + assertThat(searchHit.sourceAsString(), nullValue()); + assertThat(searchHit.getSource(), nullValue()); + assertThat(searchHit.getSourceRef(), nullValue()); + assertThat(searchHit.getSourceAsString(), nullValue()); + } + + public void testHasSource() { + InternalSearchHit searchHit = new InternalSearchHit(randomInt()); + assertFalse(searchHit.hasSource()); + searchHit.sourceRef(new BytesArray("{}")); + assertTrue(searchHit.hasSource()); + } } diff --git a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java index b1b0da73c35..79dea0e74b5 100644 --- a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.query; import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -27,6 +28,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -201,33 +203,36 @@ public class QueryStringIT extends ESIntegTestCase { assertHits(resp.getHits(), "2", "3"); assertHitCount(resp, 2L); - // Will be fixed once https://github.com/elastic/elasticsearch/pull/20965 is in - // resp = client().prepareSearch("test") - // .setQuery(queryStringQuery("Foo Bar").splitOnWhitespcae(false)) - // .get(); - // assertHits(resp.getHits(), "1", "2", "3"); - // assertHitCount(resp, 3L); + resp = client().prepareSearch("test") + .setQuery(queryStringQuery("Foo Bar").splitOnWhitespace(false)) + .get(); + assertHits(resp.getHits(), "1", "2", "3"); + assertHitCount(resp, 3L); } public void testExplicitAllFieldsRequested() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index-with-all.json"); + prepareCreate("test2").setSource(indexBody).get(); + ensureGreen("test2"); + List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo", - "f_date", "2015/09/02", - "f_float", "1.7", - "f_ip", "127.0.0.1")); - reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "bar", - "f_date", "2015/09/01", - "f_float", "1.8", - "f_ip", "127.0.0.2")); + reqs.add(client().prepareIndex("test2", "doc", "1").setSource("f1", "foo", "f2", "eggplant")); indexRandom(true, false, reqs); - SearchResponse resp = 
client().prepareSearch("test").setQuery( - queryStringQuery("127.0.0.2 \"2015/09/02\"") - .field("f_ip") // Usually this would mean we wouldn't search "all" fields - .useAllFields(true)) // ... unless explicitly requested - .get(); - assertHits(resp.getHits(), "1", "2"); - assertHitCount(resp, 2L); + SearchResponse resp = client().prepareSearch("test2").setQuery( + queryStringQuery("foo eggplent").defaultOperator(Operator.AND)).get(); + assertHitCount(resp, 0L); + + resp = client().prepareSearch("test2").setQuery( + queryStringQuery("foo eggplent").defaultOperator(Operator.AND).useAllFields(true)).get(); + assertHits(resp.getHits(), "1"); + assertHitCount(resp, 1L); + + Exception e = expectThrows(Exception.class, () -> + client().prepareSearch("test2").setQuery( + queryStringQuery("blah").field("f1").useAllFields(true)).get()); + assertThat(ExceptionsHelper.detailedMessage(e), + containsString("cannot use [all_fields] parameter in conjunction with [default_field] or [fields]")); } @LuceneTestCase.AwaitsFix(bugUrl="currently can't perform phrase queries on fields that don't support positions") diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 1cb9d6508aa..311ddb7e3b4 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1904,7 +1904,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").lte(-999999999999L)).get(), 3); } - @AwaitsFix(bugUrl = "NOCOMMIT") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/21501") public void testRangeQueryWithTimeZone() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "date", "type=date", "num", "type=integer")); diff --git 
a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index b98bc5d43cd..60f89ab326e 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -19,23 +19,33 @@ package org.elasticsearch.search.query; +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.SimpleQueryStringFlag; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; @@ -43,6 +53,8 @@ 
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitC import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; /** @@ -352,4 +364,212 @@ public class SimpleQueryStringIT extends ESIntegTestCase { assertNoFailures(searchResponse); assertHitCount(searchResponse, 0L); } + + public void testBasicAllQuery() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + prepareCreate("test").setSource(indexBody).get(); + ensureGreen("test"); + + List reqs = new ArrayList<>(); + reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo bar baz")); + reqs.add(client().prepareIndex("test", "doc", "2").setSource("f2", "Bar")); + reqs.add(client().prepareIndex("test", "doc", "3").setSource("f3", "foo bar baz")); + indexRandom(true, false, reqs); + + SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); + assertHitCount(resp, 2L); + assertHits(resp.getHits(), "1", "3"); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("bar")).get(); + assertHitCount(resp, 2L); + assertHits(resp.getHits(), "1", "3"); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")).get(); + assertHitCount(resp, 3L); + assertHits(resp.getHits(), "1", "2", "3"); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foa")).get(); + assertHitCount(resp, 1L); + assertHits(resp.getHits(), "3"); + } + + public void testWithDate() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); 
+ prepareCreate("test").setSource(indexBody).get(); + ensureGreen("test"); + + List reqs = new ArrayList<>(); + reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo", "f_date", "2015/09/02")); + reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "bar", "f_date", "2015/09/01")); + indexRandom(true, false, reqs); + + SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); + assertHits(resp.getHits(), "1", "2"); + assertHitCount(resp, 2L); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")).get(); + assertHits(resp.getHits(), "1"); + assertHitCount(resp, 1L); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("bar \"2015/09/02\"")).get(); + assertHits(resp.getHits(), "1", "2"); + assertHitCount(resp, 2L); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\" \"2015/09/01\"")).get(); + assertHits(resp.getHits(), "1", "2"); + assertHitCount(resp, 2L); + } + + public void testWithLotsOfTypes() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + prepareCreate("test").setSource(indexBody).get(); + ensureGreen("test"); + + List reqs = new ArrayList<>(); + reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo", + "f_date", "2015/09/02", + "f_float", "1.7", + "f_ip", "127.0.0.1")); + reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "bar", + "f_date", "2015/09/01", + "f_float", "1.8", + "f_ip", "127.0.0.2")); + indexRandom(true, false, reqs); + + SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); + assertHits(resp.getHits(), "1", "2"); + assertHitCount(resp, 2L); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")).get(); + assertHits(resp.getHits(), "1"); + assertHitCount(resp, 1L); + + resp = 
client().prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.2 \"2015/09/02\"")).get(); + assertHits(resp.getHits(), "1", "2"); + assertHitCount(resp, 2L); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1 1.8")).get(); + assertHits(resp.getHits(), "1", "2"); + assertHitCount(resp, 2L); + } + + public void testDocWithAllTypes() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + prepareCreate("test").setSource(indexBody).get(); + ensureGreen("test"); + + List reqs = new ArrayList<>(); + String docBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-example-document.json"); + reqs.add(client().prepareIndex("test", "doc", "1").setSource(docBody)); + indexRandom(true, false, reqs); + + SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("Baz")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("sbaz")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("19")).get(); + assertHits(resp.getHits(), "1"); + // nested doesn't match because it's hidden + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("1476383971")).get(); + assertHits(resp.getHits(), "1"); + // bool doesn't match + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("7")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("23")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("1293")).get(); + 
assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("42")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("1.7")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("1.5")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("12.23")).get(); + assertHits(resp.getHits(), "1"); + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1")).get(); + assertHits(resp.getHits(), "1"); + // binary doesn't match + // suggest doesn't match + // geo_point doesn't match + // geo_shape doesn't match + + resp = client().prepareSearch("test").setQuery( + simpleQueryStringQuery("foo Bar 19 127.0.0.1").defaultOperator(Operator.AND)).get(); + assertHits(resp.getHits(), "1"); + } + + public void testKeywordWithWhitespace() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + prepareCreate("test").setSource(indexBody).get(); + ensureGreen("test"); + + List reqs = new ArrayList<>(); + reqs.add(client().prepareIndex("test", "doc", "1").setSource("f2", "Foo Bar")); + reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "bar")); + reqs.add(client().prepareIndex("test", "doc", "3").setSource("f1", "foo bar")); + indexRandom(true, false, reqs); + + SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); + assertHits(resp.getHits(), "3"); + assertHitCount(resp, 1L); + + resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("bar")).get(); + assertHits(resp.getHits(), "2", "3"); + assertHitCount(resp, 2L); + } + + public void testExplicitAllFieldsRequested() throws Exception { + String indexBody = 
copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index-with-all.json"); + prepareCreate("test").setSource(indexBody).get(); + ensureGreen("test"); + + List reqs = new ArrayList<>(); + reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo", "f2", "eggplant")); + indexRandom(true, false, reqs); + + SearchResponse resp = client().prepareSearch("test").setQuery( + simpleQueryStringQuery("foo eggplent").defaultOperator(Operator.AND)).get(); + assertHitCount(resp, 0L); + + resp = client().prepareSearch("test").setQuery( + simpleQueryStringQuery("foo eggplent").defaultOperator(Operator.AND).useAllFields(true)).get(); + assertHits(resp.getHits(), "1"); + assertHitCount(resp, 1L); + + Exception e = expectThrows(Exception.class, () -> + client().prepareSearch("test").setQuery( + simpleQueryStringQuery("blah").field("f1").useAllFields(true)).get()); + assertThat(ExceptionsHelper.detailedMessage(e), + containsString("cannot use [all_fields] parameter in conjunction with [fields]")); + } + + @LuceneTestCase.AwaitsFix(bugUrl="currently can't perform phrase queries on fields that don't support positions") + public void testPhraseQueryOnFieldWithNoPositions() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + prepareCreate("test").setSource(indexBody).get(); + ensureGreen("test"); + + List reqs = new ArrayList<>(); + reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo bar", "f4", "eggplant parmesan")); + reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "foo bar", "f4", "chicken parmesan")); + indexRandom(true, false, reqs); + + SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("\"eggplant parmesan\"")).get(); + assertHits(resp.getHits(), "1"); + assertHitCount(resp, 1L); + } + + private void assertHits(SearchHits hits, String... 
ids) { + assertThat(hits.totalHits(), equalTo((long) ids.length)); + Set hitIds = new HashSet<>(); + for (SearchHit hit : hits.getHits()) { + hitIds.add(hit.id()); + } + assertThat(hitIds, containsInAnyOrder(ids)); + } } diff --git a/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index f01f6d5d0e1..02fde4d9717 100644 --- a/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -104,9 +104,11 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { logger.info("running doc['num1'].value > 1"); SearchResponse response = client().prepareSearch() - .setQuery(scriptQuery(new Script("doc['num1'].value > 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .setQuery(scriptQuery( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > 1", Collections.emptyMap()))) .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("sNum1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) .get(); assertThat(response.getHits().totalHits(), equalTo(2L)); @@ -121,9 +123,10 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { logger.info("running doc['num1'].value > param1"); response = client() .prepareSearch() - .setQuery(scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, CustomScriptPlugin.NAME, params))) + .setQuery(scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params))) .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("sNum1", + new Script(ScriptType.INLINE, 
CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) .get(); assertThat(response.getHits().totalHits(), equalTo(1L)); @@ -135,9 +138,10 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { logger.info("running doc['num1'].value > param1"); response = client() .prepareSearch() - .setQuery(scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, CustomScriptPlugin.NAME, params))) + .setQuery(scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params))) .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("sNum1", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) .get(); assertThat(response.getHits().totalHits(), equalTo(3L)); diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 5146660aa09..d6f96c4c586 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import java.io.IOException; +import java.util.Collections; import java.util.HashSet; import java.util.Set; @@ -76,7 +77,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase randomAsciiOfLengthBetween(3, 20))); + builder.collateQuery(randomValueOtherThan(collateQuery.getIdOrCode(), () -> randomAsciiOfLengthBetween(3, 20))); } else { builder.collateQuery(randomAsciiOfLengthBetween(3, 20)); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 
b12b993a61b..65cbda6923a 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -445,7 +445,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("fs").setSettings(Settings.builder().put("location", randomRepoPath()))); logger.info("--> creating test template"); - assertThat(client.admin().indices().preparePutTemplate("test-template").setTemplate("te*").addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties") + assertThat(client.admin().indices().preparePutTemplate("test-template").setPatterns(Collections.singletonList("te*")).addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties") .startObject("field1").field("type", "string").field("store", "yes").endObject() .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() .endObject().endObject().endObject()).get().isAcknowledged(), equalTo(true)); @@ -486,7 +486,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas if(testTemplate) { logger.info("--> creating test template"); - assertThat(client.admin().indices().preparePutTemplate("test-template").setTemplate("te*").addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties") + assertThat(client.admin().indices().preparePutTemplate("test-template").setPatterns(Collections.singletonList("te*")).addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties") .startObject("field1").field("type", "string").field("store", "yes").endObject() .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() 
.endObject().endObject().endObject()).get().isAcknowledged(), equalTo(true)); @@ -1499,8 +1499,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("location", repositoryLocation) .put("compress", randomBoolean()) .put("chunk_size", randomIntBetween(1000, 10000), ByteSizeUnit.BYTES) - .put("max_restore_bytes_per_sec", throttleRestore ? "0.5k" : "0") - .put("max_snapshot_bytes_per_sec", throttleSnapshot ? "0.5k" : "0"))); + .put("max_restore_bytes_per_sec", throttleRestore ? "10k" : "0") + .put("max_snapshot_bytes_per_sec", throttleSnapshot ? "10k" : "0"))); createIndex("test-idx"); ensureGreen(); @@ -2672,4 +2672,61 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertEquals("IndexShardSnapshotFailedException[Aborted]", snapshotInfo.shardFailures().get(0).reason()); } + public void testSnapshotSucceedsAfterSnapshotFailure() throws Exception { + logger.info("--> creating repository"); + final Path repoPath = randomRepoPath(); + final Client client = client(); + assertAcked(client.admin().cluster().preparePutRepository("test-repo").setType("mock").setVerify(false).setSettings( + Settings.builder() + .put("location", repoPath) + .put("random_control_io_exception_rate", randomIntBetween(5, 20) / 100f) + .put("random", randomAsciiOfLength(10)))); + + logger.info("--> indexing some data"); + assertAcked(prepareCreate("test-idx").setSettings( + // the less the number of shards, the less control files we have, so we are giving a higher probability of + // triggering an IOException toward the end when writing the pending-index-* files, which are the files + // that caused problems with writing subsequent snapshots if they happened to be lingering in the repository + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))); + ensureGreen(); + final int numDocs = randomIntBetween(1, 5); + for (int i = 0; i < numDocs; i++) { + index("test-idx", "doc", Integer.toString(i), 
"foo", "bar" + i); + } + refresh(); + assertThat(client.prepareSearch("test-idx").setSize(0).get().getHits().totalHits(), equalTo((long) numDocs)); + + logger.info("--> snapshot with potential I/O failures"); + try { + CreateSnapshotResponse createSnapshotResponse = + client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap") + .setWaitForCompletion(true) + .setIndices("test-idx") + .get(); + if (createSnapshotResponse.getSnapshotInfo().totalShards() != createSnapshotResponse.getSnapshotInfo().successfulShards()) { + assertThat(getFailureCount("test-repo"), greaterThan(0L)); + assertThat(createSnapshotResponse.getSnapshotInfo().shardFailures().size(), greaterThan(0)); + for (SnapshotShardFailure shardFailure : createSnapshotResponse.getSnapshotInfo().shardFailures()) { + assertThat(shardFailure.reason(), containsString("Random IOException")); + } + } + } catch (SnapshotCreationException | RepositoryException ex) { + // sometimes, the snapshot will fail with a top level I/O exception + assertThat(ExceptionsHelper.stackTrace(ex), containsString("Random IOException")); + } + + logger.info("--> snapshot with no I/O failures"); + assertAcked(client.admin().cluster().preparePutRepository("test-repo-2").setType("mock").setSettings( + Settings.builder().put("location", repoPath))); + CreateSnapshotResponse createSnapshotResponse = + client.admin().cluster().prepareCreateSnapshot("test-repo-2", "test-snap-2") + .setWaitForCompletion(true) + .setIndices("test-idx") + .get(); + assertEquals(0, createSnapshotResponse.getSnapshotInfo().failedShards()); + GetSnapshotsResponse getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots("test-repo-2") + .addSnapshots("test-snap-2").get(); + assertEquals(SnapshotState.SUCCESS, getSnapshotsResponse.getSnapshots().get(0).state()); + } + } diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index 
ca3aeb674bd..7fb717cd7c2 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -33,6 +33,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; +import com.carrotsearch.randomizedtesting.RandomizedContext; import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.MetaData; @@ -321,14 +322,28 @@ public class MockRepository extends FsRepository { @Override public void move(String sourceBlob, String targetBlob) throws IOException { - maybeIOExceptionOrBlock(targetBlob); - super.move(sourceBlob, targetBlob); + if (RandomizedContext.current().getRandom().nextBoolean()) { + // simulate a non-atomic move, since many blob container implementations + // will not have an atomic move, and we should be able to handle that + maybeIOExceptionOrBlock(targetBlob); + super.writeBlob(targetBlob, super.readBlob(sourceBlob), 0L); + super.deleteBlob(sourceBlob); + } else { + // atomic move since this inherits from FsBlobContainer which provides atomic moves + maybeIOExceptionOrBlock(targetBlob); + super.move(sourceBlob, targetBlob); + } } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { maybeIOExceptionOrBlock(blobName); super.writeBlob(blobName, inputStream, blobSize); + if (RandomizedContext.current().getRandom().nextBoolean()) { + // for network based repositories, the blob may have been written but we may still + // get an error with the client connection, so an IOException here simulates this + maybeIOExceptionOrBlock(blobName); + } } } } diff --git a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java index da1dcf43e5d..0525f4a32dc 100644 
--- a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java @@ -19,8 +19,25 @@ package org.elasticsearch.transport; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.support.TransportStatus; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; /** Unit tests for TCPTransport */ public class TCPTransportTests extends ESTestCase { @@ -127,4 +144,103 @@ public class TCPTransportTests extends ESTestCase { assertEquals(101, addresses[1].getPort()); assertEquals(102, addresses[2].getPort()); } + + public void testCompressRequest() throws IOException { + final boolean compressed = randomBoolean(); + final AtomicBoolean called = new AtomicBoolean(false); + Req request = new Req(randomRealisticUnicodeOfLengthBetween(10, 100)); + ThreadPool threadPool = new TestThreadPool(TCPTransportTests.class.getName()); + try { + TcpTransport transport = new TcpTransport("test", Settings.builder().put("transport.tcp.compress", compressed).build(), + threadPool, new BigArrays(Settings.EMPTY, null), null, null, null) { + @Override + protected InetSocketAddress getLocalAddress(Object o) { + return null; + } + + @Override + protected Object bind(String name, InetSocketAddress 
address) throws IOException { + return null; + } + + @Override + protected void closeChannels(List channel) throws IOException { + + } + + @Override + protected NodeChannels connectToChannelsLight(DiscoveryNode node) throws IOException { + return new NodeChannels(new Object[0], new Object[0], new Object[0], new Object[0], new Object[0]); + } + + @Override + protected void sendMessage(Object o, BytesReference reference, Runnable sendListener) throws IOException { + StreamInput streamIn = reference.streamInput(); + streamIn.skip(TcpHeader.MARKER_BYTES_SIZE); + int len = streamIn.readInt(); + long requestId = streamIn.readLong(); + assertEquals(42, requestId); + byte status = streamIn.readByte(); + Version version = Version.fromId(streamIn.readInt()); + assertEquals(Version.CURRENT, version); + assertEquals(compressed, TransportStatus.isCompress(status)); + called.compareAndSet(false, true); + if (compressed) { + final int bytesConsumed = TcpHeader.HEADER_SIZE; + streamIn = CompressorFactory.compressor(reference.slice(bytesConsumed, reference.length() - bytesConsumed)) + .streamInput(streamIn); + } + threadPool.getThreadContext().readHeaders(streamIn); + assertEquals("foobar", streamIn.readString()); + Req readReq = new Req(""); + readReq.readFrom(streamIn); + assertEquals(request.value, readReq.value); + } + + @Override + protected NodeChannels connectToChannels(DiscoveryNode node) throws IOException { + return new NodeChannels(new Object[0], new Object[0], new Object[0], new Object[0], new Object[0]); + } + + @Override + protected boolean isOpen(Object o) { + return false; + } + + @Override + public long serverOpen() { + return 0; + } + + @Override + protected Object nodeChannel(DiscoveryNode node, TransportRequestOptions options) throws ConnectTransportException { + return new NodeChannels(new Object[0], new Object[0], new Object[0], new Object[0], new Object[0]); + } + }; + DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), 
Version.CURRENT); + transport.sendRequest(node, 42, "foobar", request, TransportRequestOptions.EMPTY); + assertTrue(called.get()); + } finally { + ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); + } + } + + private static final class Req extends TransportRequest { + public String value; + + private Req(String value) { + this.value = value; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + value = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(value); + } + } + } diff --git a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java index cdeec658447..39f1c774634 100644 --- a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java +++ b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java @@ -115,7 +115,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase { try { client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)).execute().actionGet(); + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())).execute().actionGet(); fail(); } catch (DocumentMissingException e) { // all is well @@ -127,15 +127,15 @@ public class TimestampTTLBWIT extends ESIntegTestCase { long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl, greaterThan(0L)); client().prepareUpdate(indexOrAlias(), "type1", "2") - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)).execute().actionGet(); + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl, greaterThan(0L)); // check TTL update 
client().prepareUpdate(indexOrAlias(), "type1", "2") - .setScript(new Script("", ScriptType.INLINE, "put_values", - Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet(); + .setScript(new Script(ScriptType.INLINE, "put_values", "", + Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl, greaterThan(0L)); @@ -144,8 +144,8 @@ public class TimestampTTLBWIT extends ESIntegTestCase { // check timestamp update client().prepareIndex("test", "type1", "3").setSource("field", 1).setRefreshPolicy(IMMEDIATE).get(); client().prepareUpdate(indexOrAlias(), "type1", "3") - .setScript(new Script("", ScriptType.INLINE, "put_values", - Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute() + .setScript(new Script(ScriptType.INLINE, "put_values", "", + Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute() .actionGet(); getResponse = client().prepareGet("test", "type1", "3").setStoredFields("_timestamp").execute().actionGet(); long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); @@ -198,7 +198,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase { // Update the first object and note context variables values UpdateResponse updateResponse = client().prepareUpdate("test", "subtype1", "id1") .setRouting("routing1") - .setScript(new Script("", ScriptType.INLINE, "extract_ctx", null)) + .setScript(new Script(ScriptType.INLINE, "extract_ctx", "", Collections.emptyMap())) .execute().actionGet(); assertEquals(2, updateResponse.getVersion()); @@ -215,7 +215,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase { // Idem with the second object updateResponse = 
client().prepareUpdate("test", "type1", "parentId1") - .setScript(new Script("", ScriptType.INLINE, "extract_ctx", null)) + .setScript(new Script(ScriptType.INLINE, "extract_ctx", "", Collections.emptyMap())) .execute().actionGet(); assertEquals(2, updateResponse.getVersion()); diff --git a/core/src/test/java/org/elasticsearch/update/UpdateByNativeScriptIT.java b/core/src/test/java/org/elasticsearch/update/UpdateByNativeScriptIT.java index bad89b7ab62..59826608456 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateByNativeScriptIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateByNativeScriptIT.java @@ -57,7 +57,7 @@ public class UpdateByNativeScriptIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("foo", "SETVALUE"); client().prepareUpdate("test", "type", "1") - .setScript(new Script("custom", ScriptType.INLINE, NativeScriptEngineService.NAME, params)).get(); + .setScript(new Script(ScriptType.INLINE, NativeScriptEngineService.NAME, "custom", params)).get(); Map data = client().prepareGet("test", "type", "1").get().getSource(); assertThat(data, hasKey("foo")); diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index b662f7034c5..a3903023edf 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -369,7 +369,7 @@ public class UpdateIT extends ESIntegTestCase { UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -381,7 +381,7 @@ 
public class UpdateIT extends ESIntegTestCase { updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .execute().actionGet(); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -410,7 +410,7 @@ public class UpdateIT extends ESIntegTestCase { UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("balance", openingBalance).endObject()) .setScriptedUpsert(true) - .setScript(new Script("", ScriptType.INLINE, "scripted_upsert", params)) + .setScript(new Script(ScriptType.INLINE, "scripted_upsert", "", params)) .execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -424,7 +424,7 @@ public class UpdateIT extends ESIntegTestCase { updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("balance", openingBalance).endObject()) .setScriptedUpsert(true) - .setScript(new Script("", ScriptType.INLINE, "scripted_upsert", params)) + .setScript(new Script(ScriptType.INLINE, "scripted_upsert", "", params)) .execute().actionGet(); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -468,7 +468,7 @@ public class UpdateIT extends ESIntegTestCase { UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) - .setScript(new Script("", ScriptType.INLINE, "put_values", 
Collections.singletonMap("extra", "foo"))) + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("extra", "foo"))) .setFetchSource(true) .execute().actionGet(); @@ -480,7 +480,7 @@ public class UpdateIT extends ESIntegTestCase { updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("extra", "foo"))) + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("extra", "foo"))) .setFields("_source") .execute().actionGet(); @@ -498,24 +498,24 @@ public class UpdateIT extends ESIntegTestCase { index("test", "type", "1", "text", "value"); // version is now 1 assertThrows(client().prepareUpdate(indexOrAlias(), "type", "1") - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v2"))).setVersion(2) + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("text", "v2"))).setVersion(2) .execute(), VersionConflictEngineException.class); client().prepareUpdate(indexOrAlias(), "type", "1") - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v2"))).setVersion(1).get(); + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("text", "v2"))).setVersion(1).get(); assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(2L)); // and again with a higher version.. 
client().prepareUpdate(indexOrAlias(), "type", "1") - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v3"))).setVersion(2).get(); + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("text", "v3"))).setVersion(2).get(); assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(3L)); // after delete client().prepareDelete("test", "type", "1").get(); assertThrows(client().prepareUpdate("test", "type", "1") - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v2"))).setVersion(3) + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("text", "v2"))).setVersion(3) .execute(), DocumentMissingException.class); @@ -523,7 +523,7 @@ public class UpdateIT extends ESIntegTestCase { client().prepareIndex("test", "type", "2").setSource("text", "value").setVersion(10).setVersionType(VersionType.EXTERNAL).get(); assertThrows(client().prepareUpdate(indexOrAlias(), "type", "2") - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v2"))).setVersion(2) + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("text", "v2"))).setVersion(2) .setVersionType(VersionType.EXTERNAL).execute(), ActionRequestValidationException.class); @@ -535,7 +535,7 @@ public class UpdateIT extends ESIntegTestCase { // With internal versions, tt means "if object is there with version X, update it or explode. If it is not there, index. 
client().prepareUpdate(indexOrAlias(), "type", "3") - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v2"))) + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("text", "v2"))) .setVersion(10).setUpsert("{ \"text\": \"v0\" }").get(); get = get("test", "type", "3"); assertThat(get.getVersion(), equalTo(1L)); @@ -548,7 +548,7 @@ public class UpdateIT extends ESIntegTestCase { public void testIndexAutoCreation() throws Exception { UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("extra", "foo"))) + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("extra", "foo"))) .setFetchSource(true) .execute().actionGet(); @@ -565,7 +565,7 @@ public class UpdateIT extends ESIntegTestCase { try { client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)).execute().actionGet(); + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())).execute().actionGet(); fail(); } catch (DocumentMissingException e) { // all is well @@ -574,7 +574,7 @@ public class UpdateIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)).execute().actionGet(); + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(2L)); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -587,7 +587,7 @@ public class 
UpdateIT extends ESIntegTestCase { Map params = new HashMap<>(); params.put("inc", 3); updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("field", ScriptType.INLINE, "field_inc", params)).execute().actionGet(); + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", params)).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(3L)); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -599,7 +599,7 @@ public class UpdateIT extends ESIntegTestCase { // check noop updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("op", "none")))).execute().actionGet(); + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("_ctx", Collections.singletonMap("op", "none")))).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(3L)); assertEquals(DocWriteResponse.Result.NOOP, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -611,7 +611,7 @@ public class UpdateIT extends ESIntegTestCase { // check delete updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("", ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("op", "delete")))).execute().actionGet(); + .setScript(new Script(ScriptType.INLINE, "put_values", "", Collections.singletonMap("_ctx", Collections.singletonMap("op", "delete")))).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(4L)); assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -624,7 +624,7 @@ public class UpdateIT extends ESIntegTestCase { // check fields parameter client().prepareIndex("test", "type1", 
"1").setSource("field", 1).execute().actionGet(); updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .setFields("field") .setFetchSource(true) .execute().actionGet(); @@ -637,7 +637,7 @@ public class UpdateIT extends ESIntegTestCase { // check _source parameter client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet(); updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("field1", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field1", Collections.emptyMap())) .setFetchSource("field1", "field2") .get(); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -700,7 +700,7 @@ public class UpdateIT extends ESIntegTestCase { try { client().prepareUpdate(indexOrAlias(), "type1", "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .execute().actionGet(); fail("Should have thrown ActionRequestValidationException"); } catch (ActionRequestValidationException e) { @@ -715,7 +715,7 @@ public class UpdateIT extends ESIntegTestCase { ensureGreen(); try { client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .setDocAsUpsert(true) .execute().actionGet(); fail("Should have thrown ActionRequestValidationException"); @@ -767,7 +767,7 @@ public class UpdateIT extends ESIntegTestCase { // Update the first object and note context variables values UpdateResponse updateResponse = client().prepareUpdate("test", "subtype1", 
"id1") .setRouting("routing1") - .setScript(new Script("", ScriptType.INLINE, "extract_ctx", null)) + .setScript(new Script(ScriptType.INLINE, "extract_ctx", "", Collections.emptyMap())) .execute().actionGet(); assertEquals(2, updateResponse.getVersion()); @@ -783,7 +783,7 @@ public class UpdateIT extends ESIntegTestCase { // Idem with the second object updateResponse = client().prepareUpdate("test", "type1", "parentId1") - .setScript(new Script("", ScriptType.INLINE, "extract_ctx", null)) + .setScript(new Script(ScriptType.INLINE, "extract_ctx", "", Collections.emptyMap())) .execute().actionGet(); assertEquals(2, updateResponse.getVersion()); @@ -822,13 +822,13 @@ public class UpdateIT extends ESIntegTestCase { } if (useBulkApi) { UpdateRequestBuilder updateRequestBuilder = client().prepareUpdate(indexOrAlias(), "type1", Integer.toString(i)) - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()); client().prepareBulk().add(updateRequestBuilder).execute().actionGet(); } else { client().prepareUpdate(indexOrAlias(), "type1", Integer.toString(i)) - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) .execute().actionGet(); @@ -948,7 +948,7 @@ public class UpdateIT extends ESIntegTestCase { updateRequestsOutstanding.acquire(); try { UpdateRequest ur = client().prepareUpdate("test", "type1", Integer.toString(j)) - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .setRetryOnConflict(retryOnConflict) 
.setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) .request(); @@ -1048,7 +1048,7 @@ public class UpdateIT extends ESIntegTestCase { //All the previous operations should be complete or failed at this point for (int i = 0; i < numberOfIdsPerThread; ++i) { UpdateResponse ur = client().prepareUpdate("test", "type1", Integer.toString(i)) - .setScript(new Script("field", ScriptType.INLINE, "field_inc", null)) + .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) .execute().actionGet(); diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip b/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip index ca3d11099ce..6d609479552 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip and b/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip b/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip index 47496a9f012..6732f715cfe 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip and b/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip b/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip index 3b459959410..8c440725e9c 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip and b/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.0.zip b/core/src/test/resources/indices/bwc/index-2.0.0.zip index 2dae323f69e..cc0a0ae5320 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.0.zip and b/core/src/test/resources/indices/bwc/index-2.0.0.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.1.zip b/core/src/test/resources/indices/bwc/index-2.0.1.zip index 
2d0d5f42d50..81a31d18f81 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.1.zip and b/core/src/test/resources/indices/bwc/index-2.0.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.2.zip b/core/src/test/resources/indices/bwc/index-2.0.2.zip index f6a9492b33f..63be140108c 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.2.zip and b/core/src/test/resources/indices/bwc/index-2.0.2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.1.0.zip b/core/src/test/resources/indices/bwc/index-2.1.0.zip index 347d9cb31e9..dff157c2ab2 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.1.0.zip and b/core/src/test/resources/indices/bwc/index-2.1.0.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.1.1.zip b/core/src/test/resources/indices/bwc/index-2.1.1.zip index 6981c9af4a9..b7c408e5597 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.1.1.zip and b/core/src/test/resources/indices/bwc/index-2.1.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.1.2.zip b/core/src/test/resources/indices/bwc/index-2.1.2.zip index 57162675b14..d6a4f9fddfa 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.1.2.zip and b/core/src/test/resources/indices/bwc/index-2.1.2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.2.0.zip b/core/src/test/resources/indices/bwc/index-2.2.0.zip index 81ff74d5abf..5c9eba5c616 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.2.0.zip and b/core/src/test/resources/indices/bwc/index-2.2.0.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.2.1.zip b/core/src/test/resources/indices/bwc/index-2.2.1.zip index 7e640e4158f..3596820a44a 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.2.1.zip and b/core/src/test/resources/indices/bwc/index-2.2.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.2.2.zip 
b/core/src/test/resources/indices/bwc/index-2.2.2.zip index f6c5c7653d1..788ba0712b5 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.2.2.zip and b/core/src/test/resources/indices/bwc/index-2.2.2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.3.0.zip b/core/src/test/resources/indices/bwc/index-2.3.0.zip index c09e5d8ba19..212d3f8c7cf 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.3.0.zip and b/core/src/test/resources/indices/bwc/index-2.3.0.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.3.1.zip b/core/src/test/resources/indices/bwc/index-2.3.1.zip index de10f7926df..b825872bb55 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.3.1.zip and b/core/src/test/resources/indices/bwc/index-2.3.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.3.2.zip b/core/src/test/resources/indices/bwc/index-2.3.2.zip index eff6c8cd156..f6b8ec502d9 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.3.2.zip and b/core/src/test/resources/indices/bwc/index-2.3.2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.3.3.zip b/core/src/test/resources/indices/bwc/index-2.3.3.zip index 751819741b3..e349aac5376 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.3.3.zip and b/core/src/test/resources/indices/bwc/index-2.3.3.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.3.4.zip b/core/src/test/resources/indices/bwc/index-2.3.4.zip index b69f100398a..bc75ad093cf 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.3.4.zip and b/core/src/test/resources/indices/bwc/index-2.3.4.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.3.5.zip b/core/src/test/resources/indices/bwc/index-2.3.5.zip index dd64e699954..c01af7a2062 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.3.5.zip and b/core/src/test/resources/indices/bwc/index-2.3.5.zip differ diff --git 
a/core/src/test/resources/indices/bwc/index-2.4.0.zip b/core/src/test/resources/indices/bwc/index-2.4.0.zip index 14bd436b164..5055ded5f87 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.4.0.zip and b/core/src/test/resources/indices/bwc/index-2.4.0.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.4.1.zip b/core/src/test/resources/indices/bwc/index-2.4.1.zip index 2677b88f3db..6dc29439a0f 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.4.1.zip and b/core/src/test/resources/indices/bwc/index-2.4.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-5.0.0.zip b/core/src/test/resources/indices/bwc/index-5.0.0.zip index 422b2587760..f8deb41276b 100644 Binary files a/core/src/test/resources/indices/bwc/index-5.0.0.zip and b/core/src/test/resources/indices/bwc/index-5.0.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip b/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip index 4a46dbc8382..b32f2a48d74 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip and b/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip b/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip index 6e4080a9146..2b2663a4201 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip and b/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip b/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip index deb36fee119..59b31f5cc3b 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip and b/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0.zip b/core/src/test/resources/indices/bwc/repo-2.0.0.zip index 8042696cb90..ae6668be1cf 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.0.zip and 
b/core/src/test/resources/indices/bwc/repo-2.0.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.1.zip b/core/src/test/resources/indices/bwc/repo-2.0.1.zip index 6e9b3d0aede..c675125226c 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.1.zip and b/core/src/test/resources/indices/bwc/repo-2.0.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.2.zip b/core/src/test/resources/indices/bwc/repo-2.0.2.zip index 4dd61b0f26a..2e1062c294b 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.2.zip and b/core/src/test/resources/indices/bwc/repo-2.0.2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.1.0.zip b/core/src/test/resources/indices/bwc/repo-2.1.0.zip index b641e0b5bba..fdaf6321421 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.1.0.zip and b/core/src/test/resources/indices/bwc/repo-2.1.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.1.1.zip b/core/src/test/resources/indices/bwc/repo-2.1.1.zip index e08cde10b33..e29cf401628 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.1.1.zip and b/core/src/test/resources/indices/bwc/repo-2.1.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.1.2.zip b/core/src/test/resources/indices/bwc/repo-2.1.2.zip index f9829c219f0..f1c371720c5 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.1.2.zip and b/core/src/test/resources/indices/bwc/repo-2.1.2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.2.0.zip b/core/src/test/resources/indices/bwc/repo-2.2.0.zip index 703184dac1e..1501c1942ad 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.2.0.zip and b/core/src/test/resources/indices/bwc/repo-2.2.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.2.1.zip b/core/src/test/resources/indices/bwc/repo-2.2.1.zip index c665f79c11c..93e39514c3c 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.2.1.zip and 
b/core/src/test/resources/indices/bwc/repo-2.2.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.2.2.zip b/core/src/test/resources/indices/bwc/repo-2.2.2.zip index 9e5e6fdd30d..5c937a3c6c1 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.2.2.zip and b/core/src/test/resources/indices/bwc/repo-2.2.2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.3.0.zip b/core/src/test/resources/indices/bwc/repo-2.3.0.zip index f41df41224d..575232e09df 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.3.0.zip and b/core/src/test/resources/indices/bwc/repo-2.3.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.3.1.zip b/core/src/test/resources/indices/bwc/repo-2.3.1.zip index 78e736986ab..f0434a446f0 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.3.1.zip and b/core/src/test/resources/indices/bwc/repo-2.3.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.3.2.zip b/core/src/test/resources/indices/bwc/repo-2.3.2.zip index b160856326a..c5ca8a34325 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.3.2.zip and b/core/src/test/resources/indices/bwc/repo-2.3.2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.3.3.zip b/core/src/test/resources/indices/bwc/repo-2.3.3.zip index 411cbea5a22..4310f8e1efb 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.3.3.zip and b/core/src/test/resources/indices/bwc/repo-2.3.3.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.3.4.zip b/core/src/test/resources/indices/bwc/repo-2.3.4.zip index 4afa60f7c78..6abccc237b6 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.3.4.zip and b/core/src/test/resources/indices/bwc/repo-2.3.4.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.3.5.zip b/core/src/test/resources/indices/bwc/repo-2.3.5.zip index 5d2d00de961..f80ed069180 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.3.5.zip and 
b/core/src/test/resources/indices/bwc/repo-2.3.5.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.4.0.zip b/core/src/test/resources/indices/bwc/repo-2.4.0.zip index c5f3c0d0759..09591e1d7ed 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.4.0.zip and b/core/src/test/resources/indices/bwc/repo-2.4.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.4.1.zip b/core/src/test/resources/indices/bwc/repo-2.4.1.zip index 16a2f68bb33..2c88ecebe31 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.4.1.zip and b/core/src/test/resources/indices/bwc/repo-2.4.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-5.0.0.zip b/core/src/test/resources/indices/bwc/repo-5.0.0.zip index 7145bb1ceec..e873b9bc9a8 100644 Binary files a/core/src/test/resources/indices/bwc/repo-5.0.0.zip and b/core/src/test/resources/indices/bwc/repo-5.0.0.zip differ diff --git a/core/src/test/resources/org/elasticsearch/search/query/all-query-index-with-all.json b/core/src/test/resources/org/elasticsearch/search/query/all-query-index-with-all.json new file mode 100644 index 00000000000..05de13b4261 --- /dev/null +++ b/core/src/test/resources/org/elasticsearch/search/query/all-query-index-with-all.json @@ -0,0 +1,35 @@ +{ + "settings": { + "index": { + "number_of_shards": 1, + "number_of_replicas": 0, + "analysis": { + "analyzer": { + "my_ngrams": { + "type": "custom", + "tokenizer": "standard", + "filter": ["my_ngrams"] + } + }, + "filter": { + "my_ngrams": { + "type": "ngram", + "min_gram": 2, + "max_gram": 2 + } + } + } + } + }, + "mappings": { + "doc": { + "_all": { + "enabled": true + }, + "properties": { + "f1": {"type": "text"}, + "f2": {"type": "text", "analyzer": "my_ngrams"} + } + } + } +} diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index 2e6b92d8968..5751335d1e3 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -103,7 +103,7 @@ def delete_by_query(es, 
version, index_name, doc_type): return deleted_count = es.count(index=index_name, doc_type=doc_type, body=query)['count'] - + result = es.delete_by_query(index=index_name, doc_type=doc_type, body=query) @@ -113,9 +113,13 @@ def delete_by_query(es, version, index_name, doc_type): logging.info('Deleted %d docs' % deleted_count) -def run_basic_asserts(es, index_name, type, num_docs): +def run_basic_asserts(es, version, index_name, type, num_docs): count = es.count(index=index_name)['count'] assert count == num_docs, 'Expected %r but got %r documents' % (num_docs, count) + if parse_version(version) < parse_version('5.1.0'): + # This alias isn't allowed to be created after 5.1 so we can verify that we can still use it + count = es.count(index='#' + index_name)['count'] + assert count == num_docs, 'Expected %r but got %r documents' % (num_docs, count) for _ in range(0, num_docs): random_doc_id = random.randint(0, num_docs-1) doc = es.get(index=index_name, doc_type=type, id=random_doc_id) @@ -360,8 +364,11 @@ def generate_index(client, version, index_name): # see https://github.com/elastic/elasticsearch/issues/5817 num_docs = int(num_docs / 10) index_documents(client, index_name, 'doc', num_docs, supports_dots_in_field_names) + if parse_version(version) < parse_version('5.1.0'): + logging.info("Adding a alias that can't be created in 5.1+ so we can assert that we can still use it") + client.indices.put_alias(index=index_name, name='#' + index_name) logging.info('Running basic asserts on the data added') - run_basic_asserts(client, index_name, 'doc', num_docs) + run_basic_asserts(client, version, index_name, 'doc', num_docs) return num_docs, supports_dots_in_field_names def snapshot_index(client, version, repo_dir): @@ -494,7 +501,7 @@ def create_bwc_index(cfg, version): if node is not None: # This only happens if we've hit an exception: shutdown_node(node) - + shutil.rmtree(tmp_dir) def shutdown_node(node): @@ -533,4 +540,3 @@ if __name__ == '__main__': main() except 
KeyboardInterrupt: print('Caught keyboard interrupt, exiting...') - diff --git a/distribution/deb/src/main/packaging/init.d/elasticsearch b/distribution/deb/src/main/packaging/init.d/elasticsearch index ae0bd44c291..cf8b5351aec 100755 --- a/distribution/deb/src/main/packaging/init.d/elasticsearch +++ b/distribution/deb/src/main/packaging/init.d/elasticsearch @@ -84,6 +84,7 @@ DAEMON_OPTS="-d -p $PID_FILE -Edefault.path.logs=$LOG_DIR -Edefault.path.data=$D export ES_JAVA_OPTS export JAVA_HOME export ES_INCLUDE +export ES_JVM_OPTIONS if [ ! -x "$DAEMON" ]; then echo "The elasticsearch startup script does not exists or it is not executable, tried: $DAEMON" diff --git a/distribution/rpm/src/main/packaging/init.d/elasticsearch b/distribution/rpm/src/main/packaging/init.d/elasticsearch index 8f1d93dcbdc..f991dc2f928 100644 --- a/distribution/rpm/src/main/packaging/init.d/elasticsearch +++ b/distribution/rpm/src/main/packaging/init.d/elasticsearch @@ -60,9 +60,10 @@ prog="elasticsearch" pidfile="$PID_DIR/${prog}.pid" export ES_JAVA_OPTS -export ES_STARTUP_SLEEP_TIME export JAVA_HOME export ES_INCLUDE +export ES_JVM_OPTIONS +export ES_STARTUP_SLEEP_TIME lockfile=/var/lock/subsys/$prog diff --git a/distribution/src/main/packaging/scripts/postinst b/distribution/src/main/packaging/scripts/postinst index 6d19e5f33c7..3d5eeeedd8f 100644 --- a/distribution/src/main/packaging/scripts/postinst +++ b/distribution/src/main/packaging/scripts/postinst @@ -51,6 +51,13 @@ case "$1" in ;; esac +# to pick up /usr/lib/sysctl.d/elasticsearch.conf +if command -v /usr/lib/systemd/systemd-sysctl > /dev/null; then + /usr/lib/systemd/systemd-sysctl +elif command -v /lib/systemd/systemd-sysctl > /dev/null; then + /lib/systemd/systemd-sysctl +fi + if [ "x$IS_UPGRADE" != "xtrue" ]; then if command -v systemctl >/dev/null; then echo "### NOT starting on installation, please execute the following statements to configure elasticsearch service to start automatically using systemd" diff --git 
a/distribution/src/main/resources/config/elasticsearch.yml b/distribution/src/main/resources/config/elasticsearch.yml index 0abff999bcb..3d732e74c0e 100644 --- a/distribution/src/main/resources/config/elasticsearch.yml +++ b/distribution/src/main/resources/config/elasticsearch.yml @@ -86,10 +86,6 @@ # # ---------------------------------- Various ----------------------------------- # -# Disable starting multiple nodes on a single system: -# -#node.max_local_storage_nodes: 1 -# # Require explicit names when deleting indices: # #action.destructive_requires_name: true diff --git a/distribution/src/main/resources/config/jvm.options b/distribution/src/main/resources/config/jvm.options index 63245f172bf..37c4d5b3c93 100644 --- a/distribution/src/main/resources/config/jvm.options +++ b/distribution/src/main/resources/config/jvm.options @@ -59,6 +59,9 @@ # use our provided JNA always versus the system one -Djna.nosys=true +# use old-style file permissions on JDK9 +-Djdk.io.permissionsUseCanonicalPath=true + # flags to keep Netty from being unsafe -Dio.netty.noUnsafe=true -Dio.netty.noKeySetOptimization=true diff --git a/docs/groovy-api/index.asciidoc b/docs/groovy-api/index.asciidoc index 3ed4ff9e2e6..a140c161769 100644 --- a/docs/groovy-api/index.asciidoc +++ b/docs/groovy-api/index.asciidoc @@ -1,7 +1,7 @@ = Groovy API :ref: http://www.elastic.co/guide/en/elasticsearch/reference/current :java: http://www.elastic.co/guide/en/elasticsearch/client/java-api/current -:version: 5.0.0-alpha5 +:version: 6.0.0-alpha1 [preface] == Preface diff --git a/docs/java-api/docs/index_.asciidoc b/docs/java-api/docs/index_.asciidoc index 2b29f15fabd..1e48fbd431c 100644 --- a/docs/java-api/docs/index_.asciidoc +++ b/docs/java-api/docs/index_.asciidoc @@ -60,8 +60,9 @@ json.put("message","trying out Elasticsearch"); [[java-docs-index-generate-beans]] ===== Serialize your beans -Elasticsearch already uses http://wiki.fasterxml.com/JacksonHome[Jackson]. 
-So you can use it to serialize your beans to JSON: +You can use http://wiki.fasterxml.com/JacksonHome[Jackson] to serialize +your beans to JSON. Please add http://search.maven.org/#search%7Cga%7C1%7Cjackson-databind[Jackson Databind] + to your project. Then you can use `ObjectMapper` to serialize your beans: [source,java] -------------------------------------------------- diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 4c476391174..68559d884b9 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -35,6 +35,70 @@ For example, you can define the latest version in your `pom.xml` file: -------------------------------------------------- +=== Log4j 2 Logger + +You need to also include Log4j 2 dependencies: + +["source","xml",subs="attributes"] +-------------------------------------------------- + + org.apache.logging.log4j + log4j-api + 2.7 + + + org.apache.logging.log4j + log4j-core + 2.7 + +-------------------------------------------------- + +And also provide a Log4j 2 configuration file in your classpath. 
+For example, you can add in your `src/main/resources` project dir a `log4j2.properties` file like: + + +["source","properties",subs="attributes"] +-------------------------------------------------- +appender.console.type = Console +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = console +-------------------------------------------------- + +=== Using another Logger + +If you want to use another logger than Log4j 2, you can use http://www.slf4j.org/[SLF4J] bridge to do that: + +["source","xml",subs="attributes"] +-------------------------------------------------- + + org.apache.logging.log4j + log4j-to-slf4j + 2.7 + + + org.slf4j + slf4j-api + 1.7.21 + +-------------------------------------------------- + +http://www.slf4j.org/manual.html[This page] lists implementations you can use. Pick your favorite logger +and add it as a dependency. As an example, we will use the `slf4j-simple` logger: + +["source","xml",subs="attributes"] +-------------------------------------------------- + + org.slf4j + slf4j-simple + 1.7.21 + +-------------------------------------------------- + + == Dealing with JAR dependency conflicts If you want to use Elasticsearch in your Java application, you may have to deal with version conflicts with third party diff --git a/docs/java-rest/usage.asciidoc b/docs/java-rest/usage.asciidoc index 3bf5ee594a4..7ca8456d7d1 100644 --- a/docs/java-rest/usage.asciidoc +++ b/docs/java-rest/usage.asciidoc @@ -117,7 +117,7 @@ Response performRequest(String method, String endpoint, Response performRequest(String method, String endpoint, Map params, HttpEntity entity, - HttpAsyncResponseConsumer responseConsumer, + HttpAsyncResponseConsumerFactory responseConsumerFactory, Header... 
headers) throws IOException; @@ -141,7 +141,7 @@ void performRequestAsync(String method, String endpoint, Map params, HttpEntity entity, ResponseListener responseListener, - HttpAsyncResponseConsumer responseConsumer, + HttpAsyncResponseConsumerFactory responseConsumerFactory, Header... headers); -------------------------------------------------- @@ -155,11 +155,12 @@ call (e.g. `/_cluster/health`) `params`:: the optional parameters to be sent as querystring parameters `entity`:: the optional request body enclosed in an `org.apache.http.HttpEntity` object -`responseConsumer`:: the optional +`responseConsumerFactory`:: the optional factory that is used to create an http://hc.apache.org/httpcomponents-core-ga/httpcore-nio/apidocs/org/apache/http/nio/protocol/HttpAsyncResponseConsumer.html[`org.apache.http.nio.protocol.HttpAsyncResponseConsumer`] - callback. Controls how the response body gets streamed from a non-blocking -HTTP connection on the client side. When not provided, the default -implementation is used which buffers the whole response body in heap memory + callback instance per request attempt. Controls how the response body gets + streamed from a non-blocking HTTP connection on the client side. When not + provided, the default implementation is used which buffers the whole response + body in heap memory, up to 100 MB `responseListener`:: the listener to be notified upon asynchronous request success or failure `headers`:: optional request headers diff --git a/docs/plugins/analysis-icu.asciidoc b/docs/plugins/analysis-icu.asciidoc index 1a0f4b5e920..e2d65c95688 100644 --- a/docs/plugins/analysis-icu.asciidoc +++ b/docs/plugins/analysis-icu.asciidoc @@ -7,6 +7,23 @@ libraries, including better analysis of Asian languages, Unicode normalization, Unicode-aware case folding, collation support, and transliteration. 
+[IMPORTANT] +.ICU analysis and backwards compatibility +================================================ + +From time to time, the ICU library receives updates such as adding new +characters and emojis, and improving collation (sort) orders. These changes +may or may not affect search and sort orders, depending on which characters +sets you are using. + +While we restrict ICU upgrades to major versions, you may find that an index +created in the previous major version will need to be reindexed in order to +return correct (and correctly ordered) results, and to take advantage of new +characters. + +================================================ + + [[analysis-icu-install]] [float] ==== Installation diff --git a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc index 9963b48fb04..17ca509e3c5 100644 --- a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc @@ -76,7 +76,7 @@ When requesting detailed buckets (typically for displaying a "zoomed in" map) a "zoom1":{ "geohash_grid" : { "field":"location", - "precision":8, + "precision":8 } } } diff --git a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc index 61deffeccd2..172a528fcb3 100644 --- a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc @@ -353,7 +353,10 @@ Customized scores can be implemented via a script: -------------------------------------------------- "script_heuristic": { - "script": "_subset_freq/(_superset_freq - _subset_freq + 1)" + "script": { + "lang": "painless", + "inline": "params._subset_freq/(params._superset_freq - params._subset_freq + 1)" + } } -------------------------------------------------- diff --git 
a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 282b5528aa4..119bcdfd95c 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -84,11 +84,13 @@ Where: `date_format`:: is the optional format in which the computed date should be rendered. Defaults to `YYYY.MM.dd`. `time_zone`:: is the optional time zone . Defaults to `utc`. -You must enclose date math index name expressions within angle brackets. For example: +You must enclose date math index name expressions within angle brackets, and +all special characters should be URI encoded. For example: [source,js] ---------------------------------------------------------------------- -GET //_search +# GET //_search +GET /%3Clogstash-%7Bnow%2Fd%7D%3E/_search { "query" : { "match": { @@ -99,12 +101,12 @@ GET //_search ---------------------------------------------------------------------- // CONSOLE // TEST[s/^/PUT logstash-2016.09.20\n/] -// TEST[s/\{now\//{2016.09.20||%2f/] +// TEST[s/now/2016.09.20||/] [NOTE] .Percent encoding of date math characters ====================================================== -The special characters used for date rounding must be url encoded as follows: +The special characters used for date rounding must be URI encoded as follows: [horizontal] `<`:: `%3C` @@ -141,7 +143,8 @@ three days, assuming the indices use the default Logstash index name format, [source,js] ---------------------------------------------------------------------- -GET /,,/_search +# GET /,,/_search +GET /%3Clogstash-%7Bnow%2Fd-2d%7D%3E%2C%3Clogstash-%7Bnow%2Fd-1d%7D%3E%2C%3Clogstash-%7Bnow%2Fd%7D%3E/_search { "query" : { "match": { @@ -152,7 +155,7 @@ GET /,,/_searc ---------------------------------------------------------------------- // CONSOLE // TEST[s/^/PUT logstash-2016.09.20\nPUT logstash-2016.09.19\nPUT logstash-2016.09.18\n/] -// TEST[s/\{now/{2016.09.20||/] +// TEST[s/now/2016.09.20||/] [[common-options]] == Common options diff 
--git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc index e037907435d..6762d43e886 100644 --- a/docs/reference/cat.asciidoc +++ b/docs/reference/cat.asciidoc @@ -191,7 +191,7 @@ order by column3. [source,sh] -------------------------------------------------- -GET _cat/templates?v&s=order:desc,template +GET _cat/templates?v&s=order:desc,index_patterns -------------------------------------------------- //CONSOLE @@ -199,10 +199,10 @@ returns: [source,sh] -------------------------------------------------- -name template order version -pizza_pepperoni *pepperoni* 2 -sushi_california_roll *avocado* 1 1 -pizza_hawaiian *pineapples* 1 +name index_patterns order version +pizza_pepperoni [*pepperoni*] 2 +sushi_california_roll [*avocado*] 1 1 +pizza_hawaiian [*pineapples*] 1 -------------------------------------------------- -- diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index 94d954f0664..215cf8b8d7a 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -74,7 +74,7 @@ curl -XGET 'http://localhost:9200/_nodes/stats/process' curl -XGET 'http://localhost:9200/_nodes/10.0.0.1/stats/process' -------------------------------------------------- -The `all` flag can be set to return all the stats. +All stats can be explicitly requested via `/_nodes/stats/_all` or `/_nodes/stats?metric=_all`. [float] [[fs-info]] diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 6279c3cae0f..f9025c378f6 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -935,3 +935,34 @@ GET metricbeat-2016.05.31-1/beat/1 The previous method can also be used in combination with <> to only load the existing data into the new index, but also rename fields if needed. 
+ +[float] +=== Extracting a random subset of an index + +Reindex can be used to extract a random subset of an index for testing: + +[source,js] +---------------------------------------------------------------- +POST _reindex +{ + "size": 10, + "source": { + "index": "twitter", + "query": { + "function_score" : { + "query" : { "match_all": {} }, + "random_score" : {} + } + }, + "sort": "_score" <1> + }, + "dest": { + "index": "random_twitter" + } +} +---------------------------------------------------------------- +// CONSOLE +// TEST[setup:big_twitter] + +<1> Reindex defaults to sorting by `_doc` so `random_score` won't have any +effect unless you override the sort to `_score`. diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index d6875909216..20ffdd44b30 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -154,11 +154,12 @@ over indices created in the last three days, you could do the following: [source,js] -------------------------------------------------- -GET /,,/_search +# GET /,,/_search +GET /%3Clogs-%7Bnow%2Fd%7D-*%3E%2C%3Clogs-%7Bnow%2Fd-1d%7D-*%3E%2C%3Clogs-%7Bnow%2Fd-2d%7D-*%3E/_search -------------------------------------------------- // CONSOLE // TEST[continued] -// TEST[s/now\//now%2f/] +// TEST[s/now/2016.10.31||/] [float] === Defining the new index diff --git a/docs/reference/indices/templates.asciidoc b/docs/reference/indices/templates.asciidoc index 95cd32578a0..96ca5dcd1c7 100644 --- a/docs/reference/indices/templates.asciidoc +++ b/docs/reference/indices/templates.asciidoc @@ -3,8 +3,8 @@ Index templates allow you to define templates that will automatically be applied when new indices are created. The templates include both settings and -mappings, and a simple pattern template that controls whether the template -should be applied to the new index. 
+mappings, and a list of patterns that control whether the template should be +applied to the new index. NOTE: Templates are only applied at index creation time. Changing a template will have no impact on existing indices. @@ -15,7 +15,7 @@ For example: -------------------------------------------------- PUT _template/template_1 { - "template": "te*", + "index_patterns": ["te*", "bar*"], "settings": { "number_of_shards": 1 }, @@ -53,7 +53,7 @@ It is also possible to include aliases in an index template as follows: -------------------------------------------------- PUT _template/template_1 { - "template" : "te*", + "index_patterns" : ["te*"], "settings" : { "number_of_shards" : 1 }, @@ -147,7 +147,7 @@ orders overriding them. For example: -------------------------------------------------- PUT /_template/template_1 { - "template" : "*", + "index_patterns" : ["*"], "order" : 0, "settings" : { "number_of_shards" : 1 @@ -161,7 +161,7 @@ PUT /_template/template_1 PUT /_template/template_2 { - "template" : "te*", + "index_patterns" : ["te*"], "order" : 1, "settings" : { "number_of_shards" : 1 @@ -196,7 +196,7 @@ one. 
-------------------------------------------------- PUT /_template/template_1 { - "template" : "*", + "index_patterns" : ["*"], "order" : 0, "settings" : { "number_of_shards" : 1 diff --git a/docs/reference/mapping/dynamic-mapping.asciidoc b/docs/reference/mapping/dynamic-mapping.asciidoc index adc7d8675c4..99dcf1339e8 100644 --- a/docs/reference/mapping/dynamic-mapping.asciidoc +++ b/docs/reference/mapping/dynamic-mapping.asciidoc @@ -61,7 +61,7 @@ Automatic type creation can also be disabled for all indices by setting an index -------------------------------------------------- PUT _template/template_all { - "template": "*", + "index_patterns": ["*"], "order":0, "settings": { "index.mapper.dynamic": false <1> diff --git a/docs/reference/mapping/dynamic/default-mapping.asciidoc b/docs/reference/mapping/dynamic/default-mapping.asciidoc index 268ae4f29c7..8fcc9114dfc 100644 --- a/docs/reference/mapping/dynamic/default-mapping.asciidoc +++ b/docs/reference/mapping/dynamic/default-mapping.asciidoc @@ -44,7 +44,7 @@ within automatically created indices: -------------------------------------------------- PUT _template/logging { - "template": "logs-*", <1> + "index_patterns": ["logs-*"], <1> "settings": { "number_of_shards": 1 }, <2> "mappings": { "_default_": { diff --git a/docs/reference/mapping/dynamic/templates.asciidoc b/docs/reference/mapping/dynamic/templates.asciidoc index 468df64b1d4..dde3defff67 100644 --- a/docs/reference/mapping/dynamic/templates.asciidoc +++ b/docs/reference/mapping/dynamic/templates.asciidoc @@ -407,7 +407,7 @@ new indices, you could create the following index template: PUT _template/disable_all_field { "order": 0, - "template": "*", <1> + "index_patterns": ["*"], <1> "mappings": { "_default_": { <2> "_all": { <3> diff --git a/docs/reference/migration/migrate_6_0.asciidoc b/docs/reference/migration/migrate_6_0.asciidoc index 8a23fd0f373..cfe9f71f80e 100644 --- a/docs/reference/migration/migrate_6_0.asciidoc +++ 
b/docs/reference/migration/migrate_6_0.asciidoc @@ -24,12 +24,16 @@ way to reindex old indices is to use the `reindex` API. [float] === Also see: +* <> * <> * <> * <> * <> * <> * <> +* <> + +include::migrate_6_0/cat.asciidoc[] include::migrate_6_0/rest.asciidoc[] @@ -42,3 +46,5 @@ include::migrate_6_0/cluster.asciidoc[] include::migrate_6_0/settings.asciidoc[] include::migrate_6_0/plugins.asciidoc[] + +include::migrate_6_0/indices.asciidoc[] diff --git a/docs/reference/migration/migrate_6_0/cat.asciidoc b/docs/reference/migration/migrate_6_0/cat.asciidoc new file mode 100644 index 00000000000..013c0705991 --- /dev/null +++ b/docs/reference/migration/migrate_6_0/cat.asciidoc @@ -0,0 +1,7 @@ +[[breaking_60_cat_changes]] +=== Cat API changes + +==== Unbounded queue size in cat thread pool + +Previously if a queue size backing a thread pool was unbounded, the cat thread pool API would output an empty string in +the queue_size column. This has been changed to now output -1 so that the output is always present and always numeric. diff --git a/docs/reference/migration/migrate_6_0/indices.asciidoc b/docs/reference/migration/migrate_6_0/indices.asciidoc new file mode 100644 index 00000000000..be726ce155a --- /dev/null +++ b/docs/reference/migration/migrate_6_0/indices.asciidoc @@ -0,0 +1,29 @@ +[[breaking_60_indices_changes]] +=== Templates changes + +==== `template` is now `index_patterns` + +Previously templates expressed the indices that they should match using a glob +style pattern in the `template` field. They should now use the `index_patterns` +field instead. As the name implies you can define multiple glob style patterns +in an array but for convenience defining a single pattern as a bare string is +also supported. 
So both of these examples are valid: + +[source,js] +-------------------------------------------------- +PUT _template/template_1 +{ + "index_patterns": ["te*", "bar*"], + "settings": { + "number_of_shards": 1 + } +} +PUT _template/template_2 +{ + "index_patterns": "te*", + "settings": { + "number_of_shards": 1 + } +} +-------------------------------------------------- +// CONSOLE diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index b921edca731..5ca8acf37cf 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -8,7 +8,7 @@ then you have a cluster of one node. Every node in the cluster can handle <> and <> traffic by default. The transport layer -is used exclusively for communication between nodes and between nodes and the +is used exclusively for communication between nodes and the {javaclient}/transport-client.html[Java `TransportClient`]; the HTTP layer is used only by external REST clients. diff --git a/docs/reference/modules/scripting/native.asciidoc b/docs/reference/modules/scripting/native.asciidoc index 9580720d967..37a2eac18cc 100644 --- a/docs/reference/modules/scripting/native.asciidoc +++ b/docs/reference/modules/scripting/native.asciidoc @@ -20,17 +20,11 @@ If you squashed the whole thing into one class it'd look like: [source,java] -------------------------------------------------- -public class MyNativeScriptPlugin extends Plugin { +public class MyNativeScriptPlugin extends Plugin implements ScriptPlugin { + @Override - public String name() { - return "my-native-script"; - } - @Override - public String description() { - return "my native script that does something great"; - } - public void onModule(ScriptModule scriptModule) { - scriptModule.registerScript("my_script", MyNativeScriptFactory.class); + public List getNativeScripts() { + return Collections.singletonList(new MyNativeScriptFactory()); } public static class MyNativeScriptFactory implements 
NativeScriptFactory { diff --git a/docs/reference/modules/scripting/painless-syntax.asciidoc b/docs/reference/modules/scripting/painless-syntax.asciidoc index 8f280de9946..fa8c1e60aa2 100644 --- a/docs/reference/modules/scripting/painless-syntax.asciidoc +++ b/docs/reference/modules/scripting/painless-syntax.asciidoc @@ -130,6 +130,37 @@ using these characters: |`x` | COMMENTS (aka extended) | `'a' ==~ /a #comment/x` |======================================================================= +[float] +[[painless-deref]] +=== Dereferences + +Like lots of languages, Painless uses `.` to reference fields and call methods: + +[source,painless] +--------------------------------------------------------- +String foo = 'foo'; +TypeWithGetterOrPublicField bar = new TypeWithGetterOrPublicField() +return foo.length() + bar.x +--------------------------------------------------------- + +Like Groovy, Painless uses `?.` to perform null-safe references, with the +result being `null` if the left hand side is null: + +[source,painless] +--------------------------------------------------------- +String foo = null; +return foo?.length() // Returns null +--------------------------------------------------------- + +Unlike Groovy, Painless doesn't support writing to null values with this +operator: + +[source,painless] +--------------------------------------------------------- +TypeWithSetterOrPublicField foo = null; +foo?.x = 'bar' // Compile error +--------------------------------------------------------- + [float] [[painless-operators]] === Operators diff --git a/docs/reference/query-dsl/simple-query-string-query.asciidoc b/docs/reference/query-dsl/simple-query-string-query.asciidoc index c6f70c31416..c67ba5cd73e 100644 --- a/docs/reference/query-dsl/simple-query-string-query.asciidoc +++ b/docs/reference/query-dsl/simple-query-string-query.asciidoc @@ -61,6 +61,10 @@ based just on the prefix of a term. Defaults to `false`. the query string. 
This allows to use a field that has a different analysis chain for exact matching. Look <> for a comprehensive example. + +|`all_fields` | Perform the query on all fields detected in the mapping that can +be queried. Will be used by default when the `_all` field is disabled and no +`default_field` is specified in the index settings, and no `fields` are specified. |======================================================================= [float] @@ -85,8 +89,10 @@ When not explicitly specifying the field to search on in the query string syntax, the `index.query.default_field` will be used to derive which field to search on. It defaults to `_all` field. -So, if `_all` field is disabled, it might make sense to change it to set -a different default field. +If the `_all` field is disabled and no `fields` are specified in the request, +the `simple_query_string` query will automatically attempt to determine the +existing fields in the index's mapping that are queryable, and perform the +search on those fields. [float] ==== Multi Field diff --git a/docs/reference/setup/bootstrap-checks.asciidoc b/docs/reference/setup/bootstrap-checks.asciidoc index 1d6a3cf242a..75eeab65036 100644 --- a/docs/reference/setup/bootstrap-checks.asciidoc +++ b/docs/reference/setup/bootstrap-checks.asciidoc @@ -56,7 +56,7 @@ File descriptors are a Unix construct for tracking open "files". In Unix though, https://en.wikipedia.org/wiki/Everything_is_a_file[everything is a file]. For example, "files" could be a physical file, a virtual file (e.g., `/proc/loadavg`), or network sockets. Elasticsearch requires -lots file descriptors (e.g., every shard is composed of multiple +lots of file descriptors (e.g., every shard is composed of multiple segments and other files, plus connections to other nodes, etc.). This bootstrap check is enforced on OS X and Linux.
To pass the file descriptor check, you might have to configure < implement while (parser.nextToken() != XContentParser.Token.END_OBJECT) { parseMissingAndAdd(aggregationName, currentFieldName, parser, missingMap); } - } else if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) { + } else if (context.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) { throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " + "Multi-field aggregations do not support scripts."); @@ -121,7 +121,7 @@ public abstract class MultiValuesSourceParser implement "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); } } else if (token == XContentParser.Token.START_ARRAY) { - if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) { + if (context.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) { throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. 
" + "Multi-field aggregations do not support scripts."); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index 160fd445e43..7828c5f67fb 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -59,7 +59,7 @@ public final class ScriptProcessor extends AbstractProcessor { @Override public void execute(IngestDocument document) { - ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.INGEST, emptyMap()); + ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.INGEST); executableScript.setNextVar("ctx", document.getSourceAndMetadata()); executableScript.run(); } @@ -82,6 +82,7 @@ public final class ScriptProcessor extends AbstractProcessor { } @Override + @SuppressWarnings("unchecked") public ScriptProcessor create(Map registry, String processorTag, Map config) throws Exception { String lang = readOptionalStringProperty(TYPE, processorTag, config, "lang"); @@ -101,17 +102,21 @@ public final class ScriptProcessor extends AbstractProcessor { throw newConfigurationException(TYPE, processorTag, null, "Only one of [file], [id], or [inline] may be configured"); } - if(params == null) { + if (lang == null) { + lang = Script.DEFAULT_SCRIPT_LANG; + } + + if (params == null) { params = emptyMap(); } final Script script; if (Strings.hasLength(file)) { - script = new Script(file, FILE, lang, params); + script = new Script(FILE, lang, file, (Map)params); } else if (Strings.hasLength(inline)) { - script = new Script(inline, INLINE, lang, params); + script = new Script(INLINE, lang, inline, (Map)params); } else if (Strings.hasLength(id)) { - script = new Script(id, STORED, lang, params); + script = new Script(STORED, lang, id, 
(Map)params); } else { throw newConfigurationException(TYPE, processorTag, null, "Could not initialize script"); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java index c32b0f101a0..d59da982d2e 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java @@ -48,7 +48,7 @@ public class ScriptProcessorTests extends ESTestCase { ScriptService scriptService = mock(ScriptService.class); Script script = new Script("_script"); ExecutableScript executableScript = mock(ExecutableScript.class); - when(scriptService.executable(any(), any(), any())).thenReturn(executableScript); + when(scriptService.executable(any(Script.class), any())).thenReturn(executableScript); Map document = new HashMap<>(); document.put("bytes_in", randomInt()); diff --git a/modules/lang-expression/licenses/lucene-expressions-6.3.0-snapshot-a66a445.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index aadc6a31524..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0bf61de45f8ea73a185d48572ea094f6b696a7a8 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.3.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.3.0.jar.sha1 new file mode 100644 index 00000000000..8fca696518d --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-6.3.0.jar.sha1 @@ -0,0 +1 @@ +f9847cdbdd355f9f96993c4c322d6b453f4e84a8 \ No newline at end of file diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/IndexedExpressionTests.java 
b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/IndexedExpressionTests.java index d8eeeed8d5b..71c87937618 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/IndexedExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/IndexedExpressionTests.java @@ -58,7 +58,7 @@ public class IndexedExpressionTests extends ESIntegTestCase { client().prepareIndex("test", "scriptTest", "1").setSource("{\"theField\":\"foo\"}").get(); try { client().prepareUpdate("test", "scriptTest", "1") - .setScript(new Script("script1", ScriptType.STORED, ExpressionScriptEngineService.NAME, null)).get(); + .setScript(new Script(ScriptType.STORED, ExpressionScriptEngineService.NAME, "script1", Collections.emptyMap())).get(); fail("update script should have been rejected"); } catch(Exception e) { assertThat(e.getMessage(), containsString("failed to execute script")); @@ -67,7 +67,7 @@ public class IndexedExpressionTests extends ESIntegTestCase { try { client().prepareSearch() .setSource( - new SearchSourceBuilder().scriptField("test1", new Script("script1", ScriptType.STORED, "expression", null))) + new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, "expression", "script1", Collections.emptyMap()))) .setIndices("test").setTypes("scriptTest").get(); fail("search script should have been rejected"); } catch(Exception e) { @@ -77,7 +77,7 @@ public class IndexedExpressionTests extends ESIntegTestCase { client().prepareSearch("test") .setSource( new SearchSourceBuilder().aggregation(AggregationBuilders.terms("test").script( - new Script("script1", ScriptType.STORED, "expression", null)))).get(); + new Script(ScriptType.STORED, "expression", "script1", Collections.emptyMap())))).get(); } catch (Exception e) { assertThat(e.toString(), containsString("scripts of type [stored], operation [aggs] and lang [expression] are disabled")); } diff --git 
a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index b0c97fcb5c0..aa78a9e98ec 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -85,7 +85,7 @@ public class MoreExpressionTests extends ESIntegTestCase { req.setQuery(QueryBuilders.matchAllQuery()) .addSort(SortBuilders.fieldSort("_uid") .order(SortOrder.ASC)) - .addScriptField("foo", new Script(script, ScriptType.INLINE, "expression", paramsMap)); + .addScriptField("foo", new Script(ScriptType.INLINE, "expression", script, paramsMap)); return req; } @@ -124,7 +124,7 @@ public class MoreExpressionTests extends ESIntegTestCase { client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"), client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"), client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye")); - ScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction(new Script("1 / _score", ScriptType.INLINE, "expression", null)); + ScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction(new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap())); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE)); req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent @@ -429,13 +429,16 @@ public class MoreExpressionTests extends ESIntegTestCase { req.setQuery(QueryBuilders.matchAllQuery()) .addAggregation( AggregationBuilders.stats("int_agg").field("x") - .script(new Script("_value * 3", ScriptType.INLINE, 
ExpressionScriptEngineService.NAME, null))) + .script(new Script(ScriptType.INLINE, + ExpressionScriptEngineService.NAME, "_value * 3", Collections.emptyMap()))) .addAggregation( AggregationBuilders.stats("double_agg").field("y") - .script(new Script("_value - 1.1", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null))) + .script(new Script(ScriptType.INLINE, + ExpressionScriptEngineService.NAME, "_value - 1.1", Collections.emptyMap()))) .addAggregation( AggregationBuilders.stats("const_agg").field("x") // specifically to test a script w/o _value - .script(new Script("3.0", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null)) + .script(new Script(ScriptType.INLINE, + ExpressionScriptEngineService.NAME, "3.0", Collections.emptyMap())) ); SearchResponse rsp = req.get(); @@ -469,7 +472,8 @@ public class MoreExpressionTests extends ESIntegTestCase { req.setQuery(QueryBuilders.matchAllQuery()) .addAggregation( AggregationBuilders.terms("term_agg").field("text") - .script(new Script("_value", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null))); + .script( + new Script(ScriptType.INLINE, ExpressionScriptEngineService.NAME, "_value", Collections.emptyMap()))); String message; try { @@ -559,7 +563,7 @@ public class MoreExpressionTests extends ESIntegTestCase { UpdateRequestBuilder urb = client().prepareUpdate().setIndex("test_index"); urb.setType("doc"); urb.setId("1"); - urb.setScript(new Script("0", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null)); + urb.setScript(new Script(ScriptType.INLINE, ExpressionScriptEngineService.NAME, "0", Collections.emptyMap())); urb.get(); fail("Expression scripts should not be allowed to run as update scripts."); } catch (Exception e) { @@ -590,7 +594,8 @@ public class MoreExpressionTests extends ESIntegTestCase { .subAggregation(sum("threeSum").field("three")) .subAggregation(sum("fourSum").field("four")) .subAggregation(bucketScript("totalSum", - new Script("_value0 + _value1 + _value2", 
ScriptType.INLINE, ExpressionScriptEngineService.NAME, null), + new Script(ScriptType.INLINE, + ExpressionScriptEngineService.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), "twoSum", "threeSum", "fourSum"))) .execute().actionGet(); diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index a762720ff9d..0cd8976c76c 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -30,7 +30,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; import org.codehaus.groovy.ast.ClassCodeExpressionTransformer; import org.codehaus.groovy.ast.ClassNode; -import org.codehaus.groovy.ast.Parameter; import org.codehaus.groovy.ast.expr.ConstantExpression; import org.codehaus.groovy.ast.expr.Expression; import org.codehaus.groovy.classgen.GeneratorContext; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyIndexedScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyIndexedScriptTests.java index e273b52dad0..ce362e8d78c 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyIndexedScriptTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyIndexedScriptTests.java @@ -90,9 +90,10 @@ public class GroovyIndexedScriptTests extends ESIntegTestCase { .prepareSearch() .setSource( new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).size(1) - .scriptField("test1", new Script("script1", ScriptType.STORED, GroovyScriptEngineService.NAME, null)) + .scriptField("test1", + new Script(ScriptType.STORED, GroovyScriptEngineService.NAME, "script1", Collections.emptyMap())) .scriptField("test2", - 
new Script("script2", ScriptType.STORED, GroovyScriptEngineService.NAME, script2Params))) + new Script(ScriptType.STORED, GroovyScriptEngineService.NAME, "script2", script2Params))) .setIndices("test").setTypes("scriptTest").get(); assertHitCount(searchResponse, 5); assertTrue(searchResponse.getHits().hits().length == 1); @@ -118,7 +119,7 @@ public class GroovyIndexedScriptTests extends ESIntegTestCase { .prepareSearch() .setSource( new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).scriptField("test_field", - new Script("script1", ScriptType.STORED, GroovyScriptEngineService.NAME, null))) + new Script(ScriptType.STORED, GroovyScriptEngineService.NAME, "script1", Collections.emptyMap()))) .setIndices("test_index") .setTypes("test_type").get(); assertHitCount(searchResponse, 1); @@ -135,7 +136,7 @@ public class GroovyIndexedScriptTests extends ESIntegTestCase { client().prepareIndex("test", "scriptTest", "1").setSource("{\"theField\":\"foo\"}").get(); try { client().prepareUpdate("test", "scriptTest", "1") - .setScript(new Script("script1", ScriptType.STORED, GroovyScriptEngineService.NAME, null)).get(); + .setScript(new Script(ScriptType.STORED, GroovyScriptEngineService.NAME, "script1", Collections.emptyMap())).get(); fail("update script should have been rejected"); } catch (Exception e) { assertThat(e.getMessage(), containsString("failed to execute script")); @@ -156,7 +157,7 @@ public class GroovyIndexedScriptTests extends ESIntegTestCase { .prepareSearch("test") .setSource( new SearchSourceBuilder().aggregation(AggregationBuilders.terms("test").script( - new Script("script1", ScriptType.STORED, GroovyScriptEngineService.NAME, null)))).get(); + new Script(ScriptType.STORED, GroovyScriptEngineService.NAME, "script1", Collections.emptyMap())))).get(); assertHitCount(searchResponse, 1); assertThat(searchResponse.getAggregations().get("test"), notNullValue()); } diff --git 
a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java index a706b421975..196d878ae66 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java @@ -68,7 +68,7 @@ public class GroovyScriptTests extends ESIntegTestCase { } public void assertScript(String scriptString) { - Script script = new Script(scriptString, ScriptType.INLINE, GroovyScriptEngineService.NAME, null); + Script script = new Script(ScriptType.INLINE, GroovyScriptEngineService.NAME, scriptString, Collections.emptyMap()); SearchResponse resp = client().prepareSearch("test") .setSource(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).sort(SortBuilders. scriptSort(script, ScriptSortType.NUMBER))) @@ -85,8 +85,8 @@ public class GroovyScriptTests extends ESIntegTestCase { try { client().prepareSearch("test") .setQuery( - constantScoreQuery(scriptQuery(new Script("1 == not_found", ScriptType.INLINE, GroovyScriptEngineService.NAME, - null)))).get(); + constantScoreQuery(scriptQuery(new Script(ScriptType.INLINE, GroovyScriptEngineService.NAME, "1 == not_found", + Collections.emptyMap())))).get(); fail("should have thrown an exception"); } catch (SearchPhaseExecutionException e) { assertThat(e.toString()+ "should not contained NotSerializableTransportException", @@ -100,7 +100,7 @@ public class GroovyScriptTests extends ESIntegTestCase { try { client().prepareSearch("test") .setQuery(constantScoreQuery(scriptQuery( - new Script("null.foo", ScriptType.INLINE, GroovyScriptEngineService.NAME, null)))).get(); + new Script(ScriptType.INLINE, GroovyScriptEngineService.NAME, "null.foo", Collections.emptyMap())))).get(); fail("should have thrown an exception"); } catch (SearchPhaseExecutionException e) { assertThat(e.toString() + "should not 
contained NotSerializableTransportException", @@ -120,7 +120,7 @@ public class GroovyScriptTests extends ESIntegTestCase { // doc[] access SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(scriptFunction( - new Script("doc['bar'].value", ScriptType.INLINE, GroovyScriptEngineService.NAME, null))) + new Script(ScriptType.INLINE, GroovyScriptEngineService.NAME, "doc['bar'].value", Collections.emptyMap()))) .boostMode(CombineFunction.REPLACE)).get(); assertNoFailures(resp); @@ -135,7 +135,7 @@ public class GroovyScriptTests extends ESIntegTestCase { // _score can be accessed SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchQuery("foo", "dog"), - scriptFunction(new Script("_score", ScriptType.INLINE, GroovyScriptEngineService.NAME, null))) + scriptFunction(new Script(ScriptType.INLINE, GroovyScriptEngineService.NAME, "_score", Collections.emptyMap()))) .boostMode(CombineFunction.REPLACE)).get(); assertNoFailures(resp); assertSearchHits(resp, "3", "1"); @@ -147,7 +147,8 @@ public class GroovyScriptTests extends ESIntegTestCase { .prepareSearch("test") .setQuery( functionScoreQuery(matchQuery("foo", "dog"), scriptFunction( - new Script("_score > 0.0 ? _score : 0", ScriptType.INLINE, GroovyScriptEngineService.NAME, null))) + new Script(ScriptType.INLINE, + GroovyScriptEngineService.NAME, "_score > 0.0 ? 
_score : 0", Collections.emptyMap()))) .boostMode(CombineFunction.REPLACE)).get(); assertNoFailures(resp); assertSearchHits(resp, "3", "1"); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java index 95bae3732e5..662cd86f799 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java @@ -30,7 +30,6 @@ import com.github.mustachejava.TemplateContext; import com.github.mustachejava.codes.DefaultMustache; import com.github.mustachejava.codes.IterableCode; import com.github.mustachejava.codes.WriteCode; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -54,6 +53,7 @@ public class CustomMustacheFactory extends DefaultMustacheFactory { static final String CONTENT_TYPE_PARAM = "content_type"; + static final String JSON_MIME_TYPE_WITH_CHARSET = "application/json; charset=UTF-8"; static final String JSON_MIME_TYPE = "application/json"; static final String PLAIN_TEXT_MIME_TYPE = "text/plain"; static final String X_WWW_FORM_URLENCODED_MIME_TYPE = "application/x-www-form-urlencoded"; @@ -63,6 +63,7 @@ public class CustomMustacheFactory extends DefaultMustacheFactory { private static final Map> ENCODERS; static { Map> encoders = new HashMap<>(); + encoders.put(JSON_MIME_TYPE_WITH_CHARSET, JsonEscapeEncoder::new); encoders.put(JSON_MIME_TYPE, JsonEscapeEncoder::new); encoders.put(PLAIN_TEXT_MIME_TYPE, DefaultEncoder::new); encoders.put(X_WWW_FORM_URLENCODED_MIME_TYPE, UrlEncoder::new); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java index b1db44defa0..93549d1d791 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java @@ -33,7 +33,7 @@ import java.util.List; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class MultiSearchTemplateRequest extends ActionRequest implements CompositeIndicesRequest { +public class MultiSearchTemplateRequest extends ActionRequest implements CompositeIndicesRequest { private List requests = new ArrayList<>(); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index 170070564f9..a5a0ded3bee 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -42,7 +42,7 @@ public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin } @Override - public List, ? 
extends ActionResponse>> getActions() { + public List> getActions() { return Arrays.asList(new ActionHandler<>(SearchTemplateAction.INSTANCE, TransportSearchTemplateAction.class), new ActionHandler<>(MultiSearchTemplateAction.INSTANCE, TransportMultiSearchTemplateAction.class)); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java index b2bc514327c..08c0e1643bc 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java @@ -83,8 +83,7 @@ public final class MustacheScriptEngineService extends AbstractComponent impleme * Compile a template string to (in this case) a Mustache object than can * later be re-used for execution to fill in missing parameter values. * - * @param templateSource - * a string representing the template to compile. + * @param templateSource a string representing the template to compile. * @return a compiled template object for later execution. * */ @Override diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java index d7ac37f8313..b405d0950e7 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java @@ -37,7 +37,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to execute a search based on a search template. 
*/ -public class SearchTemplateRequest extends ActionRequest implements IndicesRequest { +public class SearchTemplateRequest extends ActionRequest implements IndicesRequest { private SearchRequest request; private boolean simulate = false; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java index 731659c4b72..3744416a4ef 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java @@ -39,6 +39,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import java.io.IOException; +import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -57,11 +58,12 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder params) { - this(new Script(template, scriptType, "mustache", params)); + this(new Script(scriptType, "mustache", template, params)); } public TemplateQueryBuilder(String template, ScriptType scriptType, Map params, XContentType ct) { - this(new Script(template, scriptType, "mustache", params, ct)); + this(new Script(scriptType, "mustache", template, + ct == null ? 
Collections.emptyMap() : Collections.singletonMap(Script.CONTENT_TYPE_OPTION, ct.mediaType()), params)); } TemplateQueryBuilder(Script template) { diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index f323fcf450f..e366b34e3d0 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -40,7 +40,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import static java.util.Collections.emptyMap; +import java.util.Collections; + import static org.elasticsearch.script.ScriptContext.Standard.SEARCH; public class TransportSearchTemplateAction extends HandledTransportAction { @@ -66,8 +67,9 @@ public class TransportSearchTemplateAction extends HandledTransportAction listener) { final SearchTemplateResponse response = new SearchTemplateResponse(); try { - Script script = new Script(request.getScript(), request.getScriptType(), TEMPLATE_LANG, request.getScriptParams()); - ExecutableScript executable = scriptService.executable(script, SEARCH, emptyMap()); + Script script = new Script(request.getScriptType(), TEMPLATE_LANG, request.getScript(), + request.getScriptParams() == null ? 
Collections.emptyMap() : request.getScriptParams()); + ExecutableScript executable = scriptService.executable(script, SEARCH); BytesReference source = (BytesReference) executable.run(); response.setSource(source); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/CustomMustacheFactoryTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/CustomMustacheFactoryTests.java index f2f0d6d80fd..c1cba140ff9 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/CustomMustacheFactoryTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/CustomMustacheFactoryTests.java @@ -51,6 +51,8 @@ public class CustomMustacheFactoryTests extends ESTestCase { e = expectThrows(IllegalArgumentException.class, () -> CustomMustacheFactory.createEncoder("test")); assertThat(e.getMessage(), equalTo("No encoder found for MIME type [test]")); + assertThat(CustomMustacheFactory.createEncoder(CustomMustacheFactory.JSON_MIME_TYPE_WITH_CHARSET), + instanceOf(CustomMustacheFactory.JsonEscapeEncoder.class)); assertThat(CustomMustacheFactory.createEncoder(CustomMustacheFactory.JSON_MIME_TYPE), instanceOf(CustomMustacheFactory.JsonEscapeEncoder.class)); assertThat(CustomMustacheFactory.createEncoder(CustomMustacheFactory.PLAIN_TEXT_MIME_TYPE), diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index ca4a6de7166..20211e3935f 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -84,7 +84,7 @@ public class MustacheScriptEngineTests extends ESTestCase { XContentParser parser = XContentFactory.xContent(templateString).createParser(templateString); Script 
script = Script.parse(parser, new ParseFieldMatcher(false)); CompiledScript compiledScript = new CompiledScript(ScriptType.INLINE, null, "mustache", - qe.compile(null, script.getScript(), Collections.emptyMap())); + qe.compile(null, script.getIdOrCode(), Collections.emptyMap())); ExecutableScript executableScript = qe.executable(compiledScript, script.getParams()); assertThat(((BytesReference) executableScript.run()).utf8ToString(), equalTo("{\"match_all\":{}}")); } @@ -95,7 +95,7 @@ public class MustacheScriptEngineTests extends ESTestCase { XContentParser parser = XContentFactory.xContent(templateString).createParser(templateString); Script script = Script.parse(parser, new ParseFieldMatcher(false)); CompiledScript compiledScript = new CompiledScript(ScriptType.INLINE, null, "mustache", - qe.compile(null, script.getScript(), Collections.emptyMap())); + qe.compile(null, script.getIdOrCode(), Collections.emptyMap())); ExecutableScript executableScript = qe.executable(compiledScript, script.getParams()); assertThat(((BytesReference) executableScript.run()).utf8ToString(), equalTo("{ \"match_all\":{} }")); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java index 41242a693dd..7bff3f59842 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.script.mustache; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentFactory; @@ -50,6 +49,8 @@ import java.util.HashMap; import java.util.Map; import java.util.function.Function; +import static org.hamcrest.Matchers.containsString; + public class TemplateQueryBuilderTests extends AbstractQueryTestCase { /** @@ -114,7 +115,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase>>='; OCTAL: '0' [0-7]+ [lL]?; HEX: '0' [xX] [0-9a-fA-F]+ [lL]?; INTEGER: ( '0' | [1-9] [0-9]* ) [lLfFdD]?; -DECIMAL: ( '0' | [1-9] [0-9]* ) (DOT [0-9]+)? ( [eE] [+\-]? [0-9]+ )? [fF]?; +DECIMAL: ( '0' | [1-9] [0-9]* ) (DOT [0-9]+)? ( [eE] [+\-]? [0-9]+ )? [fFdD]?; STRING: ( '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"' ) | ( '\'' ( '\\\'' | '\\\\' | ~[\\"] )*? '\'' ); REGEX: '/' ( ~('/' | '\n') | '\\' ~'\n' )+ '/' [cilmsUux]* { SlashStrategy.slashIsRegex(this) }?; diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.g4 b/modules/lang-painless/src/main/antlr/PainlessParser.g4 index 42876b18f0d..cca96e65b8b 100644 --- a/modules/lang-painless/src/main/antlr/PainlessParser.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessParser.g4 @@ -156,11 +156,11 @@ postdot ; callinvoke - : DOT DOTID arguments + : COND? DOT DOTID arguments ; fieldaccess - : DOT ( DOTID | DOTINTEGER ) + : COND? 
DOT ( DOTID | DOTINTEGER ) ; braceaccess diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index 864b44fb766..b3c1a3caea1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -30,6 +30,9 @@ import org.elasticsearch.painless.Definition.Type; public final class AnalyzerCaster { public static Cast getLegalCast(Location location, Type actual, Type expected, boolean explicit, boolean internal) { + if (actual == null || expected == null) { + throw new IllegalStateException("Neither actual [" + actual + "] nor expected [" + expected + "] can be null"); + } if (actual.equals(expected)) { return null; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java index eae7150b9be..54e164c37c3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java @@ -204,21 +204,21 @@ class PainlessLexer extends Lexer { "~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090I\u0092"+ "J\u0094K\u0096L\u0098M\u009aN\u009cO\u009eP\u00a0Q\u00a2R\u00a4S\u00a6"+ "T\4\2\3\24\5\2\13\f\17\17\"\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5"+ - "\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4\2GGgg\4\2--//\4\2HHhh\4\2"+ - "$$^^\4\2\f\f\61\61\3\2\f\f\t\2WWeekknouuwwzz\5\2C\\aac|\6\2\62;C\\aac"+ - "|\u026b\2\4\3\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2"+ - "\16\3\2\2\2\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3"+ - "\2\2\2\2\32\3\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2"+ - 
"$\3\2\2\2\2&\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60"+ - "\3\2\2\2\2\62\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2"+ - "\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H"+ - "\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2"+ - "\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2"+ - "\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2"+ - "n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3"+ - "\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3"+ - "\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2"+ - "\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0096"+ + "\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4\2GGgg\4\2--//\6\2FFHHffh"+ + "h\4\2$$^^\4\2\f\f\61\61\3\2\f\f\t\2WWeekknouuwwzz\5\2C\\aac|\6\2\62;C"+ + "\\aac|\u026b\2\4\3\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2"+ + "\2\2\16\3\2\2\2\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2"+ + "\30\3\2\2\2\2\32\3\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2"+ + "\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2"+ + "\2\60\3\2\2\2\2\62\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2"+ + "\2\2\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2"+ + "\2H\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T"+ + "\3\2\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3"+ + "\2\2\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2"+ + "\2\2n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2"+ + "z\3\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084"+ + "\3\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2"+ + "\2\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0096"+ "\3\2\2\2\2\u0098\3\2\2\2\2\u009a\3\2\2\2\2\u009c\3\2\2\2\2\u009e\3\2\2"+ 
"\2\2\u00a0\3\2\2\2\2\u00a2\3\2\2\2\3\u00a4\3\2\2\2\3\u00a6\3\2\2\2\4\u00a9"+ "\3\2\2\2\6\u00c4\3\2\2\2\b\u00c8\3\2\2\2\n\u00ca\3\2\2\2\f\u00cc\3\2\2"+ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java index 8766dc9f89c..964ef714838 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java @@ -2558,6 +2558,7 @@ class PainlessParser extends Parser { public ArgumentsContext arguments() { return getRuleContext(ArgumentsContext.class,0); } + public TerminalNode COND() { return getToken(PainlessParser.COND, 0); } public CallinvokeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -2572,14 +2573,24 @@ class PainlessParser extends Parser { public final CallinvokeContext callinvoke() throws RecognitionException { CallinvokeContext _localctx = new CallinvokeContext(_ctx, getState()); enterRule(_localctx, 40, RULE_callinvoke); + int _la; try { enterOuterAlt(_localctx, 1); { - setState(354); - match(DOT); setState(355); + _la = _input.LA(1); + if (_la==COND) { + { + setState(354); + match(COND); + } + } + + setState(357); + match(DOT); + setState(358); match(DOTID); - setState(356); + setState(359); arguments(); } } @@ -2598,6 +2609,7 @@ class PainlessParser extends Parser { public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } public TerminalNode DOTINTEGER() { return getToken(PainlessParser.DOTINTEGER, 0); } + public TerminalNode COND() { return getToken(PainlessParser.COND, 0); } public FieldaccessContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -2616,9 +2628,18 @@ class PainlessParser extends Parser { try { 
enterOuterAlt(_localctx, 1); { - setState(358); + setState(362); + _la = _input.LA(1); + if (_la==COND) { + { + setState(361); + match(COND); + } + } + + setState(364); match(DOT); - setState(359); + setState(365); _la = _input.LA(1); if ( !(_la==DOTINTEGER || _la==DOTID) ) { _errHandler.recoverInline(this); @@ -2661,11 +2682,11 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(361); + setState(367); match(LBRACE); - setState(362); + setState(368); expression(0); - setState(363); + setState(369); match(RBRACE); } } @@ -2762,17 +2783,17 @@ class PainlessParser extends Parser { int _la; try { int _alt; - setState(409); - switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { + setState(415); + switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: _localctx = new NewstandardarrayContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(365); + setState(371); match(NEW); - setState(366); + setState(372); match(TYPE); - setState(371); + setState(377); _errHandler.sync(this); _alt = 1; do { @@ -2780,11 +2801,11 @@ class PainlessParser extends Parser { case 1: { { - setState(367); + setState(373); match(LBRACE); - setState(368); + setState(374); expression(0); - setState(369); + setState(375); match(RBRACE); } } @@ -2792,31 +2813,31 @@ class PainlessParser extends Parser { default: throw new NoViableAltException(this); } - setState(373); + setState(379); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(382); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + setState(388); + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(375); + setState(381); postdot(); - setState(379); + setState(385); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,30,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(376); + setState(382); postfix(); } } } - setState(381); + setState(387); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,30,_ctx); } } break; @@ -2827,67 +2848,67 @@ class PainlessParser extends Parser { _localctx = new NewinitializedarrayContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(384); + setState(390); match(NEW); - setState(385); + setState(391); match(TYPE); - setState(386); + setState(392); match(LBRACE); - setState(387); + setState(393); match(RBRACE); - setState(388); + setState(394); match(LBRACK); - setState(397); + setState(403); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { { - setState(389); + setState(395); expression(0); - setState(394); + setState(400); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(390); + setState(396); match(COMMA); - setState(391); + setState(397); expression(0); } } - setState(396); + setState(402); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(400); + setState(406); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(399); + setState(405); match(SEMICOLON); } } - setState(402); + setState(408); match(RBRACK); - setState(406); + setState(412); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(403); + setState(409); postfix(); } } } - setState(408); + setState(414); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } break; @@ -2933,41 +2954,41 @@ class PainlessParser extends Parser { enterRule(_localctx, 48, RULE_listinitializer); int _la; try { - setState(424); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + setState(430); + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(411); - match(LBRACE); - setState(412); - expression(0); setState(417); + match(LBRACE); + setState(418); + expression(0); + setState(423); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(413); + setState(419); match(COMMA); - setState(414); + setState(420); expression(0); } } - setState(419); + setState(425); _errHandler.sync(this); _la = _input.LA(1); } - setState(420); + setState(426); match(RBRACE); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(422); + setState(428); match(LBRACE); - setState(423); + setState(429); match(RBRACE); } break; @@ -3014,43 +3035,43 @@ class PainlessParser extends Parser { enterRule(_localctx, 50, RULE_mapinitializer); int _la; try { - setState(440); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + setState(446); + switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(426); - match(LBRACE); - setState(427); - maptoken(); setState(432); + match(LBRACE); + setState(433); + maptoken(); + setState(438); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(428); + setState(434); match(COMMA); - setState(429); + setState(435); maptoken(); } } - setState(434); + setState(440); 
_errHandler.sync(this); _la = _input.LA(1); } - setState(435); + setState(441); match(RBRACE); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(437); + setState(443); match(LBRACE); - setState(438); + setState(444); match(COLON); - setState(439); + setState(445); match(RBRACE); } break; @@ -3092,11 +3113,11 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(442); + setState(448); expression(0); - setState(443); + setState(449); match(COLON); - setState(444); + setState(450); expression(0); } } @@ -3143,34 +3164,34 @@ class PainlessParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(446); + setState(452); match(LP); - setState(455); + setState(461); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << THIS) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { { - setState(447); + setState(453); argument(); - setState(452); + setState(458); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(448); + setState(454); match(COMMA); - setState(449); + setState(455); argument(); } } - setState(454); + setState(460); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(457); + setState(463); match(RP); } } @@ -3211,26 +3232,26 @@ class PainlessParser extends Parser { ArgumentContext _localctx = new ArgumentContext(_ctx, getState()); enterRule(_localctx, 56, RULE_argument); try { - setState(462); - switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { + setState(468); + switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: 
enterOuterAlt(_localctx, 1); { - setState(459); + setState(465); expression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(460); + setState(466); lambda(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(461); + setState(467); funcref(); } break; @@ -3285,58 +3306,58 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(477); + setState(483); switch (_input.LA(1)) { case TYPE: case ID: { - setState(464); + setState(470); lamtype(); } break; case LP: { - setState(465); + setState(471); match(LP); - setState(474); + setState(480); _la = _input.LA(1); if (_la==TYPE || _la==ID) { { - setState(466); + setState(472); lamtype(); - setState(471); + setState(477); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(467); + setState(473); match(COMMA); - setState(468); + setState(474); lamtype(); } } - setState(473); + setState(479); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(476); + setState(482); match(RP); } break; default: throw new NoViableAltException(this); } - setState(479); + setState(485); match(ARROW); - setState(482); + setState(488); switch (_input.LA(1)) { case LBRACK: { - setState(480); + setState(486); block(); } break; @@ -3361,7 +3382,7 @@ class PainlessParser extends Parser { case TYPE: case ID: { - setState(481); + setState(487); expression(0); } break; @@ -3404,16 +3425,16 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(491); _la = _input.LA(1); if (_la==TYPE) { { - setState(484); + setState(490); decltype(); } } - setState(487); + setState(493); match(ID); } } @@ -3492,17 +3513,17 @@ class PainlessParser extends Parser { FuncrefContext _localctx = new FuncrefContext(_ctx, getState()); enterRule(_localctx, 62, RULE_funcref); try { - setState(502); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { + setState(508); + switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { 
case 1: _localctx = new ClassfuncrefContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(489); + setState(495); match(TYPE); - setState(490); + setState(496); match(REF); - setState(491); + setState(497); match(ID); } break; @@ -3510,11 +3531,11 @@ class PainlessParser extends Parser { _localctx = new ConstructorfuncrefContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(492); + setState(498); decltype(); - setState(493); + setState(499); match(REF); - setState(494); + setState(500); match(NEW); } break; @@ -3522,11 +3543,11 @@ class PainlessParser extends Parser { _localctx = new CapturingfuncrefContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(496); + setState(502); match(ID); - setState(497); + setState(503); match(REF); - setState(498); + setState(504); match(ID); } break; @@ -3534,11 +3555,11 @@ class PainlessParser extends Parser { _localctx = new LocalfuncrefContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(499); + setState(505); match(THIS); - setState(500); + setState(506); match(REF); - setState(501); + setState(507); match(ID); } break; @@ -3606,7 +3627,7 @@ class PainlessParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3T\u01fb\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3T\u0201\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -3632,173 +3653,176 @@ class PainlessParser extends Parser { "\3\22\7\22\u0140\n\22\f\22\16\22\u0143\13\22\3\22\5\22\u0146\n\22\3\23"+ "\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+ "\3\23\3\23\3\23\5\23\u015a\n\23\3\24\3\24\3\24\5\24\u015f\n\24\3\25\3"+ - "\25\5\25\u0163\n\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\30\3\30\3\30"+ - 
"\3\30\3\31\3\31\3\31\3\31\3\31\3\31\6\31\u0176\n\31\r\31\16\31\u0177\3"+ - "\31\3\31\7\31\u017c\n\31\f\31\16\31\u017f\13\31\5\31\u0181\n\31\3\31\3"+ - "\31\3\31\3\31\3\31\3\31\3\31\3\31\7\31\u018b\n\31\f\31\16\31\u018e\13"+ - "\31\5\31\u0190\n\31\3\31\5\31\u0193\n\31\3\31\3\31\7\31\u0197\n\31\f\31"+ - "\16\31\u019a\13\31\5\31\u019c\n\31\3\32\3\32\3\32\3\32\7\32\u01a2\n\32"+ - "\f\32\16\32\u01a5\13\32\3\32\3\32\3\32\3\32\5\32\u01ab\n\32\3\33\3\33"+ - "\3\33\3\33\7\33\u01b1\n\33\f\33\16\33\u01b4\13\33\3\33\3\33\3\33\3\33"+ - "\3\33\5\33\u01bb\n\33\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\7\35\u01c5"+ - "\n\35\f\35\16\35\u01c8\13\35\5\35\u01ca\n\35\3\35\3\35\3\36\3\36\3\36"+ - "\5\36\u01d1\n\36\3\37\3\37\3\37\3\37\3\37\7\37\u01d8\n\37\f\37\16\37\u01db"+ - "\13\37\5\37\u01dd\n\37\3\37\5\37\u01e0\n\37\3\37\3\37\3\37\5\37\u01e5"+ - "\n\37\3 \5 \u01e8\n \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\5!\u01f9"+ - "\n!\3!\2\3\36\"\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62"+ - "\64\668:<>@\2\16\3\3\r\r\3\2\37!\3\2\"#\3\289\3\2$&\3\2\'*\3\2+.\3\2<"+ - "G\3\2:;\4\2\35\36\"#\3\2HK\3\2ST\u0233\2E\3\2\2\2\4P\3\2\2\2\6U\3\2\2"+ - "\2\b\u00bb\3\2\2\2\n\u00bf\3\2\2\2\f\u00c1\3\2\2\2\16\u00ca\3\2\2\2\20"+ - "\u00ce\3\2\2\2\22\u00d0\3\2\2\2\24\u00d2\3\2\2\2\26\u00db\3\2\2\2\30\u00e3"+ - "\3\2\2\2\32\u00e8\3\2\2\2\34\u00ef\3\2\2\2\36\u00f1\3\2\2\2 \u0133\3\2"+ - "\2\2\"\u0145\3\2\2\2$\u0159\3\2\2\2&\u015e\3\2\2\2(\u0162\3\2\2\2*\u0164"+ - "\3\2\2\2,\u0168\3\2\2\2.\u016b\3\2\2\2\60\u019b\3\2\2\2\62\u01aa\3\2\2"+ - "\2\64\u01ba\3\2\2\2\66\u01bc\3\2\2\28\u01c0\3\2\2\2:\u01d0\3\2\2\2<\u01df"+ - "\3\2\2\2>\u01e7\3\2\2\2@\u01f8\3\2\2\2BD\5\4\3\2CB\3\2\2\2DG\3\2\2\2E"+ - "C\3\2\2\2EF\3\2\2\2FK\3\2\2\2GE\3\2\2\2HJ\5\b\5\2IH\3\2\2\2JM\3\2\2\2"+ - "KI\3\2\2\2KL\3\2\2\2LN\3\2\2\2MK\3\2\2\2NO\7\2\2\3O\3\3\2\2\2PQ\5\26\f"+ - "\2QR\7R\2\2RS\5\6\4\2ST\5\f\7\2T\5\3\2\2\2Ua\7\t\2\2VW\5\26\f\2W^\7R\2"+ - "\2XY\7\f\2\2YZ\5\26\f\2Z[\7R\2\2[]\3\2\2\2\\X\3\2\2\2]`\3\2\2\2^\\\3\2"+ - 
"\2\2^_\3\2\2\2_b\3\2\2\2`^\3\2\2\2aV\3\2\2\2ab\3\2\2\2bc\3\2\2\2cd\7\n"+ - "\2\2d\7\3\2\2\2ef\7\16\2\2fg\7\t\2\2gh\5\36\20\2hi\7\n\2\2im\5\n\6\2j"+ - "k\7\20\2\2kn\5\n\6\2ln\6\5\2\2mj\3\2\2\2ml\3\2\2\2n\u00bc\3\2\2\2op\7"+ - "\21\2\2pq\7\t\2\2qr\5\36\20\2ru\7\n\2\2sv\5\n\6\2tv\5\16\b\2us\3\2\2\2"+ - "ut\3\2\2\2v\u00bc\3\2\2\2wx\7\22\2\2xy\5\f\7\2yz\7\21\2\2z{\7\t\2\2{|"+ - "\5\36\20\2|}\7\n\2\2}~\5\34\17\2~\u00bc\3\2\2\2\177\u0080\7\23\2\2\u0080"+ - "\u0082\7\t\2\2\u0081\u0083\5\20\t\2\u0082\u0081\3\2\2\2\u0082\u0083\3"+ - "\2\2\2\u0083\u0084\3\2\2\2\u0084\u0086\7\r\2\2\u0085\u0087\5\36\20\2\u0086"+ - "\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\3\2\2\2\u0088\u008a\7\r"+ - "\2\2\u0089\u008b\5\22\n\2\u008a\u0089\3\2\2\2\u008a\u008b\3\2\2\2\u008b"+ - "\u008c\3\2\2\2\u008c\u008f\7\n\2\2\u008d\u0090\5\n\6\2\u008e\u0090\5\16"+ - "\b\2\u008f\u008d\3\2\2\2\u008f\u008e\3\2\2\2\u0090\u00bc\3\2\2\2\u0091"+ - "\u0092\7\23\2\2\u0092\u0093\7\t\2\2\u0093\u0094\5\26\f\2\u0094\u0095\7"+ - "R\2\2\u0095\u0096\7\65\2\2\u0096\u0097\5\36\20\2\u0097\u0098\7\n\2\2\u0098"+ - "\u0099\5\n\6\2\u0099\u00bc\3\2\2\2\u009a\u009b\7\23\2\2\u009b\u009c\7"+ - "\t\2\2\u009c\u009d\7R\2\2\u009d\u009e\7\17\2\2\u009e\u009f\5\36\20\2\u009f"+ - "\u00a0\7\n\2\2\u00a0\u00a1\5\n\6\2\u00a1\u00bc\3\2\2\2\u00a2\u00a3\5\24"+ - "\13\2\u00a3\u00a4\5\34\17\2\u00a4\u00bc\3\2\2\2\u00a5\u00a6\7\24\2\2\u00a6"+ - "\u00bc\5\34\17\2\u00a7\u00a8\7\25\2\2\u00a8\u00bc\5\34\17\2\u00a9\u00aa"+ - "\7\26\2\2\u00aa\u00ab\5\36\20\2\u00ab\u00ac\5\34\17\2\u00ac\u00bc\3\2"+ - "\2\2\u00ad\u00ae\7\30\2\2\u00ae\u00b0\5\f\7\2\u00af\u00b1\5\32\16\2\u00b0"+ - "\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2\u00b0\3\2\2\2\u00b2\u00b3\3\2"+ - "\2\2\u00b3\u00bc\3\2\2\2\u00b4\u00b5\7\32\2\2\u00b5\u00b6\5\36\20\2\u00b6"+ - "\u00b7\5\34\17\2\u00b7\u00bc\3\2\2\2\u00b8\u00b9\5\36\20\2\u00b9\u00ba"+ - "\5\34\17\2\u00ba\u00bc\3\2\2\2\u00bbe\3\2\2\2\u00bbo\3\2\2\2\u00bbw\3"+ - "\2\2\2\u00bb\177\3\2\2\2\u00bb\u0091\3\2\2\2\u00bb\u009a\3\2\2\2\u00bb"+ - 
"\u00a2\3\2\2\2\u00bb\u00a5\3\2\2\2\u00bb\u00a7\3\2\2\2\u00bb\u00a9\3\2"+ - "\2\2\u00bb\u00ad\3\2\2\2\u00bb\u00b4\3\2\2\2\u00bb\u00b8\3\2\2\2\u00bc"+ - "\t\3\2\2\2\u00bd\u00c0\5\f\7\2\u00be\u00c0\5\b\5\2\u00bf\u00bd\3\2\2\2"+ - "\u00bf\u00be\3\2\2\2\u00c0\13\3\2\2\2\u00c1\u00c5\7\5\2\2\u00c2\u00c4"+ - "\5\b\5\2\u00c3\u00c2\3\2\2\2\u00c4\u00c7\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c5"+ - "\u00c6\3\2\2\2\u00c6\u00c8\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c8\u00c9\7\6"+ - "\2\2\u00c9\r\3\2\2\2\u00ca\u00cb\7\r\2\2\u00cb\17\3\2\2\2\u00cc\u00cf"+ - "\5\24\13\2\u00cd\u00cf\5\36\20\2\u00ce\u00cc\3\2\2\2\u00ce\u00cd\3\2\2"+ - "\2\u00cf\21\3\2\2\2\u00d0\u00d1\5\36\20\2\u00d1\23\3\2\2\2\u00d2\u00d3"+ - "\5\26\f\2\u00d3\u00d8\5\30\r\2\u00d4\u00d5\7\f\2\2\u00d5\u00d7\5\30\r"+ - "\2\u00d6\u00d4\3\2\2\2\u00d7\u00da\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d9"+ - "\3\2\2\2\u00d9\25\3\2\2\2\u00da\u00d8\3\2\2\2\u00db\u00e0\7Q\2\2\u00dc"+ - "\u00dd\7\7\2\2\u00dd\u00df\7\b\2\2\u00de\u00dc\3\2\2\2\u00df\u00e2\3\2"+ - "\2\2\u00e0\u00de\3\2\2\2\u00e0\u00e1\3\2\2\2\u00e1\27\3\2\2\2\u00e2\u00e0"+ - "\3\2\2\2\u00e3\u00e6\7R\2\2\u00e4\u00e5\7<\2\2\u00e5\u00e7\5\36\20\2\u00e6"+ - "\u00e4\3\2\2\2\u00e6\u00e7\3\2\2\2\u00e7\31\3\2\2\2\u00e8\u00e9\7\31\2"+ - "\2\u00e9\u00ea\7\t\2\2\u00ea\u00eb\7Q\2\2\u00eb\u00ec\7R\2\2\u00ec\u00ed"+ - "\7\n\2\2\u00ed\u00ee\5\f\7\2\u00ee\33\3\2\2\2\u00ef\u00f0\t\2\2\2\u00f0"+ - "\35\3\2\2\2\u00f1\u00f2\b\20\1\2\u00f2\u00f3\5 \21\2\u00f3\u0123\3\2\2"+ - "\2\u00f4\u00f5\f\20\2\2\u00f5\u00f6\t\3\2\2\u00f6\u0122\5\36\20\21\u00f7"+ - "\u00f8\f\17\2\2\u00f8\u00f9\t\4\2\2\u00f9\u0122\5\36\20\20\u00fa\u00fb"+ - "\f\16\2\2\u00fb\u00fc\t\5\2\2\u00fc\u0122\5\36\20\17\u00fd\u00fe\f\r\2"+ - "\2\u00fe\u00ff\t\6\2\2\u00ff\u0122\5\36\20\16\u0100\u0101\f\f\2\2\u0101"+ - "\u0102\t\7\2\2\u0102\u0122\5\36\20\r\u0103\u0104\f\n\2\2\u0104\u0105\t"+ - "\b\2\2\u0105\u0122\5\36\20\13\u0106\u0107\f\t\2\2\u0107\u0108\7/\2\2\u0108"+ - "\u0122\5\36\20\n\u0109\u010a\f\b\2\2\u010a\u010b\7\60\2\2\u010b\u0122"+ - 
"\5\36\20\t\u010c\u010d\f\7\2\2\u010d\u010e\7\61\2\2\u010e\u0122\5\36\20"+ - "\b\u010f\u0110\f\6\2\2\u0110\u0111\7\62\2\2\u0111\u0122\5\36\20\7\u0112"+ - "\u0113\f\5\2\2\u0113\u0114\7\63\2\2\u0114\u0122\5\36\20\6\u0115\u0116"+ - "\f\4\2\2\u0116\u0117\7\64\2\2\u0117\u0118\5\36\20\2\u0118\u0119\7\65\2"+ - "\2\u0119\u011a\5\36\20\4\u011a\u0122\3\2\2\2\u011b\u011c\f\3\2\2\u011c"+ - "\u011d\t\t\2\2\u011d\u0122\5\36\20\3\u011e\u011f\f\13\2\2\u011f\u0120"+ - "\7\34\2\2\u0120\u0122\5\26\f\2\u0121\u00f4\3\2\2\2\u0121\u00f7\3\2\2\2"+ - "\u0121\u00fa\3\2\2\2\u0121\u00fd\3\2\2\2\u0121\u0100\3\2\2\2\u0121\u0103"+ - "\3\2\2\2\u0121\u0106\3\2\2\2\u0121\u0109\3\2\2\2\u0121\u010c\3\2\2\2\u0121"+ - "\u010f\3\2\2\2\u0121\u0112\3\2\2\2\u0121\u0115\3\2\2\2\u0121\u011b\3\2"+ - "\2\2\u0121\u011e\3\2\2\2\u0122\u0125\3\2\2\2\u0123\u0121\3\2\2\2\u0123"+ - "\u0124\3\2\2\2\u0124\37\3\2\2\2\u0125\u0123\3\2\2\2\u0126\u0127\t\n\2"+ - "\2\u0127\u0134\5\"\22\2\u0128\u0129\5\"\22\2\u0129\u012a\t\n\2\2\u012a"+ - "\u0134\3\2\2\2\u012b\u0134\5\"\22\2\u012c\u012d\t\13\2\2\u012d\u0134\5"+ - " \21\2\u012e\u012f\7\t\2\2\u012f\u0130\5\26\f\2\u0130\u0131\7\n\2\2\u0131"+ - "\u0132\5 \21\2\u0132\u0134\3\2\2\2\u0133\u0126\3\2\2\2\u0133\u0128\3\2"+ - "\2\2\u0133\u012b\3\2\2\2\u0133\u012c\3\2\2\2\u0133\u012e\3\2\2\2\u0134"+ - "!\3\2\2\2\u0135\u0139\5$\23\2\u0136\u0138\5&\24\2\u0137\u0136\3\2\2\2"+ - "\u0138\u013b\3\2\2\2\u0139\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u0146"+ - "\3\2\2\2\u013b\u0139\3\2\2\2\u013c\u013d\5\26\f\2\u013d\u0141\5(\25\2"+ - "\u013e\u0140\5&\24\2\u013f\u013e\3\2\2\2\u0140\u0143\3\2\2\2\u0141\u013f"+ - "\3\2\2\2\u0141\u0142\3\2\2\2\u0142\u0146\3\2\2\2\u0143\u0141\3\2\2\2\u0144"+ - "\u0146\5\60\31\2\u0145\u0135\3\2\2\2\u0145\u013c\3\2\2\2\u0145\u0144\3"+ - "\2\2\2\u0146#\3\2\2\2\u0147\u0148\7\t\2\2\u0148\u0149\5\36\20\2\u0149"+ - "\u014a\7\n\2\2\u014a\u015a\3\2\2\2\u014b\u015a\t\f\2\2\u014c\u015a\7N"+ - "\2\2\u014d\u015a\7O\2\2\u014e\u015a\7P\2\2\u014f\u015a\7L\2\2\u0150\u015a"+ - 
"\7M\2\2\u0151\u015a\5\62\32\2\u0152\u015a\5\64\33\2\u0153\u015a\7R\2\2"+ - "\u0154\u0155\7R\2\2\u0155\u015a\58\35\2\u0156\u0157\7\27\2\2\u0157\u0158"+ - "\7Q\2\2\u0158\u015a\58\35\2\u0159\u0147\3\2\2\2\u0159\u014b\3\2\2\2\u0159"+ - "\u014c\3\2\2\2\u0159\u014d\3\2\2\2\u0159\u014e\3\2\2\2\u0159\u014f\3\2"+ - "\2\2\u0159\u0150\3\2\2\2\u0159\u0151\3\2\2\2\u0159\u0152\3\2\2\2\u0159"+ - "\u0153\3\2\2\2\u0159\u0154\3\2\2\2\u0159\u0156\3\2\2\2\u015a%\3\2\2\2"+ - "\u015b\u015f\5*\26\2\u015c\u015f\5,\27\2\u015d\u015f\5.\30\2\u015e\u015b"+ - "\3\2\2\2\u015e\u015c\3\2\2\2\u015e\u015d\3\2\2\2\u015f\'\3\2\2\2\u0160"+ - "\u0163\5*\26\2\u0161\u0163\5,\27\2\u0162\u0160\3\2\2\2\u0162\u0161\3\2"+ - "\2\2\u0163)\3\2\2\2\u0164\u0165\7\13\2\2\u0165\u0166\7T\2\2\u0166\u0167"+ - "\58\35\2\u0167+\3\2\2\2\u0168\u0169\7\13\2\2\u0169\u016a\t\r\2\2\u016a"+ - "-\3\2\2\2\u016b\u016c\7\7\2\2\u016c\u016d\5\36\20\2\u016d\u016e\7\b\2"+ - "\2\u016e/\3\2\2\2\u016f\u0170\7\27\2\2\u0170\u0175\7Q\2\2\u0171\u0172"+ - "\7\7\2\2\u0172\u0173\5\36\20\2\u0173\u0174\7\b\2\2\u0174\u0176\3\2\2\2"+ - "\u0175\u0171\3\2\2\2\u0176\u0177\3\2\2\2\u0177\u0175\3\2\2\2\u0177\u0178"+ - "\3\2\2\2\u0178\u0180\3\2\2\2\u0179\u017d\5(\25\2\u017a\u017c\5&\24\2\u017b"+ - "\u017a\3\2\2\2\u017c\u017f\3\2\2\2\u017d\u017b\3\2\2\2\u017d\u017e\3\2"+ - "\2\2\u017e\u0181\3\2\2\2\u017f\u017d\3\2\2\2\u0180\u0179\3\2\2\2\u0180"+ - "\u0181\3\2\2\2\u0181\u019c\3\2\2\2\u0182\u0183\7\27\2\2\u0183\u0184\7"+ - "Q\2\2\u0184\u0185\7\7\2\2\u0185\u0186\7\b\2\2\u0186\u018f\7\5\2\2\u0187"+ - "\u018c\5\36\20\2\u0188\u0189\7\f\2\2\u0189\u018b\5\36\20\2\u018a\u0188"+ - "\3\2\2\2\u018b\u018e\3\2\2\2\u018c\u018a\3\2\2\2\u018c\u018d\3\2\2\2\u018d"+ - "\u0190\3\2\2\2\u018e\u018c\3\2\2\2\u018f\u0187\3\2\2\2\u018f\u0190\3\2"+ - "\2\2\u0190\u0192\3\2\2\2\u0191\u0193\7\r\2\2\u0192\u0191\3\2\2\2\u0192"+ - "\u0193\3\2\2\2\u0193\u0194\3\2\2\2\u0194\u0198\7\6\2\2\u0195\u0197\5&"+ - "\24\2\u0196\u0195\3\2\2\2\u0197\u019a\3\2\2\2\u0198\u0196\3\2\2\2\u0198"+ - 
"\u0199\3\2\2\2\u0199\u019c\3\2\2\2\u019a\u0198\3\2\2\2\u019b\u016f\3\2"+ - "\2\2\u019b\u0182\3\2\2\2\u019c\61\3\2\2\2\u019d\u019e\7\7\2\2\u019e\u01a3"+ - "\5\36\20\2\u019f\u01a0\7\f\2\2\u01a0\u01a2\5\36\20\2\u01a1\u019f\3\2\2"+ - "\2\u01a2\u01a5\3\2\2\2\u01a3\u01a1\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a6"+ - "\3\2\2\2\u01a5\u01a3\3\2\2\2\u01a6\u01a7\7\b\2\2\u01a7\u01ab\3\2\2\2\u01a8"+ - "\u01a9\7\7\2\2\u01a9\u01ab\7\b\2\2\u01aa\u019d\3\2\2\2\u01aa\u01a8\3\2"+ - "\2\2\u01ab\63\3\2\2\2\u01ac\u01ad\7\7\2\2\u01ad\u01b2\5\66\34\2\u01ae"+ - "\u01af\7\f\2\2\u01af\u01b1\5\66\34\2\u01b0\u01ae\3\2\2\2\u01b1\u01b4\3"+ - "\2\2\2\u01b2\u01b0\3\2\2\2\u01b2\u01b3\3\2\2\2\u01b3\u01b5\3\2\2\2\u01b4"+ - "\u01b2\3\2\2\2\u01b5\u01b6\7\b\2\2\u01b6\u01bb\3\2\2\2\u01b7\u01b8\7\7"+ - "\2\2\u01b8\u01b9\7\65\2\2\u01b9\u01bb\7\b\2\2\u01ba\u01ac\3\2\2\2\u01ba"+ - "\u01b7\3\2\2\2\u01bb\65\3\2\2\2\u01bc\u01bd\5\36\20\2\u01bd\u01be\7\65"+ - "\2\2\u01be\u01bf\5\36\20\2\u01bf\67\3\2\2\2\u01c0\u01c9\7\t\2\2\u01c1"+ - "\u01c6\5:\36\2\u01c2\u01c3\7\f\2\2\u01c3\u01c5\5:\36\2\u01c4\u01c2\3\2"+ - "\2\2\u01c5\u01c8\3\2\2\2\u01c6\u01c4\3\2\2\2\u01c6\u01c7\3\2\2\2\u01c7"+ - "\u01ca\3\2\2\2\u01c8\u01c6\3\2\2\2\u01c9\u01c1\3\2\2\2\u01c9\u01ca\3\2"+ - "\2\2\u01ca\u01cb\3\2\2\2\u01cb\u01cc\7\n\2\2\u01cc9\3\2\2\2\u01cd\u01d1"+ - "\5\36\20\2\u01ce\u01d1\5<\37\2\u01cf\u01d1\5@!\2\u01d0\u01cd\3\2\2\2\u01d0"+ - "\u01ce\3\2\2\2\u01d0\u01cf\3\2\2\2\u01d1;\3\2\2\2\u01d2\u01e0\5> \2\u01d3"+ - "\u01dc\7\t\2\2\u01d4\u01d9\5> \2\u01d5\u01d6\7\f\2\2\u01d6\u01d8\5> \2"+ - "\u01d7\u01d5\3\2\2\2\u01d8\u01db\3\2\2\2\u01d9\u01d7\3\2\2\2\u01d9\u01da"+ - "\3\2\2\2\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2\u01dc\u01d4\3\2\2\2\u01dc"+ - "\u01dd\3\2\2\2\u01dd\u01de\3\2\2\2\u01de\u01e0\7\n\2\2\u01df\u01d2\3\2"+ - "\2\2\u01df\u01d3\3\2\2\2\u01e0\u01e1\3\2\2\2\u01e1\u01e4\7\67\2\2\u01e2"+ - "\u01e5\5\f\7\2\u01e3\u01e5\5\36\20\2\u01e4\u01e2\3\2\2\2\u01e4\u01e3\3"+ - 
"\2\2\2\u01e5=\3\2\2\2\u01e6\u01e8\5\26\f\2\u01e7\u01e6\3\2\2\2\u01e7\u01e8"+ - "\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01ea\7R\2\2\u01ea?\3\2\2\2\u01eb\u01ec"+ - "\7Q\2\2\u01ec\u01ed\7\66\2\2\u01ed\u01f9\7R\2\2\u01ee\u01ef\5\26\f\2\u01ef"+ - "\u01f0\7\66\2\2\u01f0\u01f1\7\27\2\2\u01f1\u01f9\3\2\2\2\u01f2\u01f3\7"+ - "R\2\2\u01f3\u01f4\7\66\2\2\u01f4\u01f9\7R\2\2\u01f5\u01f6\7\33\2\2\u01f6"+ - "\u01f7\7\66\2\2\u01f7\u01f9\7R\2\2\u01f8\u01eb\3\2\2\2\u01f8\u01ee\3\2"+ - "\2\2\u01f8\u01f2\3\2\2\2\u01f8\u01f5\3\2\2\2\u01f9A\3\2\2\2\62EK^amu\u0082"+ - "\u0086\u008a\u008f\u00b2\u00bb\u00bf\u00c5\u00ce\u00d8\u00e0\u00e6\u0121"+ - "\u0123\u0133\u0139\u0141\u0145\u0159\u015e\u0162\u0177\u017d\u0180\u018c"+ - "\u018f\u0192\u0198\u019b\u01a3\u01aa\u01b2\u01ba\u01c6\u01c9\u01d0\u01d9"+ - "\u01dc\u01df\u01e4\u01e7\u01f8"; + "\25\5\25\u0163\n\25\3\26\5\26\u0166\n\26\3\26\3\26\3\26\3\26\3\27\5\27"+ + "\u016d\n\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31"+ + "\3\31\6\31\u017c\n\31\r\31\16\31\u017d\3\31\3\31\7\31\u0182\n\31\f\31"+ + "\16\31\u0185\13\31\5\31\u0187\n\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31"+ + "\3\31\7\31\u0191\n\31\f\31\16\31\u0194\13\31\5\31\u0196\n\31\3\31\5\31"+ + "\u0199\n\31\3\31\3\31\7\31\u019d\n\31\f\31\16\31\u01a0\13\31\5\31\u01a2"+ + "\n\31\3\32\3\32\3\32\3\32\7\32\u01a8\n\32\f\32\16\32\u01ab\13\32\3\32"+ + "\3\32\3\32\3\32\5\32\u01b1\n\32\3\33\3\33\3\33\3\33\7\33\u01b7\n\33\f"+ + "\33\16\33\u01ba\13\33\3\33\3\33\3\33\3\33\3\33\5\33\u01c1\n\33\3\34\3"+ + "\34\3\34\3\34\3\35\3\35\3\35\3\35\7\35\u01cb\n\35\f\35\16\35\u01ce\13"+ + "\35\5\35\u01d0\n\35\3\35\3\35\3\36\3\36\3\36\5\36\u01d7\n\36\3\37\3\37"+ + "\3\37\3\37\3\37\7\37\u01de\n\37\f\37\16\37\u01e1\13\37\5\37\u01e3\n\37"+ + "\3\37\5\37\u01e6\n\37\3\37\3\37\3\37\5\37\u01eb\n\37\3 \5 \u01ee\n \3"+ + " \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\5!\u01ff\n!\3!\2\3\36\"\2"+ + "\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@\2\16\3"+ + 
"\3\r\r\3\2\37!\3\2\"#\3\289\3\2$&\3\2\'*\3\2+.\3\2\u01ed"+ + "\3\2\2\2@\u01fe\3\2\2\2BD\5\4\3\2CB\3\2\2\2DG\3\2\2\2EC\3\2\2\2EF\3\2"+ + "\2\2FK\3\2\2\2GE\3\2\2\2HJ\5\b\5\2IH\3\2\2\2JM\3\2\2\2KI\3\2\2\2KL\3\2"+ + "\2\2LN\3\2\2\2MK\3\2\2\2NO\7\2\2\3O\3\3\2\2\2PQ\5\26\f\2QR\7R\2\2RS\5"+ + "\6\4\2ST\5\f\7\2T\5\3\2\2\2Ua\7\t\2\2VW\5\26\f\2W^\7R\2\2XY\7\f\2\2YZ"+ + "\5\26\f\2Z[\7R\2\2[]\3\2\2\2\\X\3\2\2\2]`\3\2\2\2^\\\3\2\2\2^_\3\2\2\2"+ + "_b\3\2\2\2`^\3\2\2\2aV\3\2\2\2ab\3\2\2\2bc\3\2\2\2cd\7\n\2\2d\7\3\2\2"+ + "\2ef\7\16\2\2fg\7\t\2\2gh\5\36\20\2hi\7\n\2\2im\5\n\6\2jk\7\20\2\2kn\5"+ + "\n\6\2ln\6\5\2\2mj\3\2\2\2ml\3\2\2\2n\u00bc\3\2\2\2op\7\21\2\2pq\7\t\2"+ + "\2qr\5\36\20\2ru\7\n\2\2sv\5\n\6\2tv\5\16\b\2us\3\2\2\2ut\3\2\2\2v\u00bc"+ + "\3\2\2\2wx\7\22\2\2xy\5\f\7\2yz\7\21\2\2z{\7\t\2\2{|\5\36\20\2|}\7\n\2"+ + "\2}~\5\34\17\2~\u00bc\3\2\2\2\177\u0080\7\23\2\2\u0080\u0082\7\t\2\2\u0081"+ + "\u0083\5\20\t\2\u0082\u0081\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0084\3"+ + "\2\2\2\u0084\u0086\7\r\2\2\u0085\u0087\5\36\20\2\u0086\u0085\3\2\2\2\u0086"+ + "\u0087\3\2\2\2\u0087\u0088\3\2\2\2\u0088\u008a\7\r\2\2\u0089\u008b\5\22"+ + "\n\2\u008a\u0089\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c"+ + "\u008f\7\n\2\2\u008d\u0090\5\n\6\2\u008e\u0090\5\16\b\2\u008f\u008d\3"+ + "\2\2\2\u008f\u008e\3\2\2\2\u0090\u00bc\3\2\2\2\u0091\u0092\7\23\2\2\u0092"+ + "\u0093\7\t\2\2\u0093\u0094\5\26\f\2\u0094\u0095\7R\2\2\u0095\u0096\7\65"+ + "\2\2\u0096\u0097\5\36\20\2\u0097\u0098\7\n\2\2\u0098\u0099\5\n\6\2\u0099"+ + "\u00bc\3\2\2\2\u009a\u009b\7\23\2\2\u009b\u009c\7\t\2\2\u009c\u009d\7"+ + "R\2\2\u009d\u009e\7\17\2\2\u009e\u009f\5\36\20\2\u009f\u00a0\7\n\2\2\u00a0"+ + "\u00a1\5\n\6\2\u00a1\u00bc\3\2\2\2\u00a2\u00a3\5\24\13\2\u00a3\u00a4\5"+ + "\34\17\2\u00a4\u00bc\3\2\2\2\u00a5\u00a6\7\24\2\2\u00a6\u00bc\5\34\17"+ + "\2\u00a7\u00a8\7\25\2\2\u00a8\u00bc\5\34\17\2\u00a9\u00aa\7\26\2\2\u00aa"+ + "\u00ab\5\36\20\2\u00ab\u00ac\5\34\17\2\u00ac\u00bc\3\2\2\2\u00ad\u00ae"+ + 
"\7\30\2\2\u00ae\u00b0\5\f\7\2\u00af\u00b1\5\32\16\2\u00b0\u00af\3\2\2"+ + "\2\u00b1\u00b2\3\2\2\2\u00b2\u00b0\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00bc"+ + "\3\2\2\2\u00b4\u00b5\7\32\2\2\u00b5\u00b6\5\36\20\2\u00b6\u00b7\5\34\17"+ + "\2\u00b7\u00bc\3\2\2\2\u00b8\u00b9\5\36\20\2\u00b9\u00ba\5\34\17\2\u00ba"+ + "\u00bc\3\2\2\2\u00bbe\3\2\2\2\u00bbo\3\2\2\2\u00bbw\3\2\2\2\u00bb\177"+ + "\3\2\2\2\u00bb\u0091\3\2\2\2\u00bb\u009a\3\2\2\2\u00bb\u00a2\3\2\2\2\u00bb"+ + "\u00a5\3\2\2\2\u00bb\u00a7\3\2\2\2\u00bb\u00a9\3\2\2\2\u00bb\u00ad\3\2"+ + "\2\2\u00bb\u00b4\3\2\2\2\u00bb\u00b8\3\2\2\2\u00bc\t\3\2\2\2\u00bd\u00c0"+ + "\5\f\7\2\u00be\u00c0\5\b\5\2\u00bf\u00bd\3\2\2\2\u00bf\u00be\3\2\2\2\u00c0"+ + "\13\3\2\2\2\u00c1\u00c5\7\5\2\2\u00c2\u00c4\5\b\5\2\u00c3\u00c2\3\2\2"+ + "\2\u00c4\u00c7\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6\u00c8"+ + "\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c8\u00c9\7\6\2\2\u00c9\r\3\2\2\2\u00ca"+ + "\u00cb\7\r\2\2\u00cb\17\3\2\2\2\u00cc\u00cf\5\24\13\2\u00cd\u00cf\5\36"+ + "\20\2\u00ce\u00cc\3\2\2\2\u00ce\u00cd\3\2\2\2\u00cf\21\3\2\2\2\u00d0\u00d1"+ + "\5\36\20\2\u00d1\23\3\2\2\2\u00d2\u00d3\5\26\f\2\u00d3\u00d8\5\30\r\2"+ + "\u00d4\u00d5\7\f\2\2\u00d5\u00d7\5\30\r\2\u00d6\u00d4\3\2\2\2\u00d7\u00da"+ + "\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\25\3\2\2\2\u00da"+ + "\u00d8\3\2\2\2\u00db\u00e0\7Q\2\2\u00dc\u00dd\7\7\2\2\u00dd\u00df\7\b"+ + "\2\2\u00de\u00dc\3\2\2\2\u00df\u00e2\3\2\2\2\u00e0\u00de\3\2\2\2\u00e0"+ + "\u00e1\3\2\2\2\u00e1\27\3\2\2\2\u00e2\u00e0\3\2\2\2\u00e3\u00e6\7R\2\2"+ + "\u00e4\u00e5\7<\2\2\u00e5\u00e7\5\36\20\2\u00e6\u00e4\3\2\2\2\u00e6\u00e7"+ + "\3\2\2\2\u00e7\31\3\2\2\2\u00e8\u00e9\7\31\2\2\u00e9\u00ea\7\t\2\2\u00ea"+ + "\u00eb\7Q\2\2\u00eb\u00ec\7R\2\2\u00ec\u00ed\7\n\2\2\u00ed\u00ee\5\f\7"+ + "\2\u00ee\33\3\2\2\2\u00ef\u00f0\t\2\2\2\u00f0\35\3\2\2\2\u00f1\u00f2\b"+ + "\20\1\2\u00f2\u00f3\5 \21\2\u00f3\u0123\3\2\2\2\u00f4\u00f5\f\20\2\2\u00f5"+ + 
"\u00f6\t\3\2\2\u00f6\u0122\5\36\20\21\u00f7\u00f8\f\17\2\2\u00f8\u00f9"+ + "\t\4\2\2\u00f9\u0122\5\36\20\20\u00fa\u00fb\f\16\2\2\u00fb\u00fc\t\5\2"+ + "\2\u00fc\u0122\5\36\20\17\u00fd\u00fe\f\r\2\2\u00fe\u00ff\t\6\2\2\u00ff"+ + "\u0122\5\36\20\16\u0100\u0101\f\f\2\2\u0101\u0102\t\7\2\2\u0102\u0122"+ + "\5\36\20\r\u0103\u0104\f\n\2\2\u0104\u0105\t\b\2\2\u0105\u0122\5\36\20"+ + "\13\u0106\u0107\f\t\2\2\u0107\u0108\7/\2\2\u0108\u0122\5\36\20\n\u0109"+ + "\u010a\f\b\2\2\u010a\u010b\7\60\2\2\u010b\u0122\5\36\20\t\u010c\u010d"+ + "\f\7\2\2\u010d\u010e\7\61\2\2\u010e\u0122\5\36\20\b\u010f\u0110\f\6\2"+ + "\2\u0110\u0111\7\62\2\2\u0111\u0122\5\36\20\7\u0112\u0113\f\5\2\2\u0113"+ + "\u0114\7\63\2\2\u0114\u0122\5\36\20\6\u0115\u0116\f\4\2\2\u0116\u0117"+ + "\7\64\2\2\u0117\u0118\5\36\20\2\u0118\u0119\7\65\2\2\u0119\u011a\5\36"+ + "\20\4\u011a\u0122\3\2\2\2\u011b\u011c\f\3\2\2\u011c\u011d\t\t\2\2\u011d"+ + "\u0122\5\36\20\3\u011e\u011f\f\13\2\2\u011f\u0120\7\34\2\2\u0120\u0122"+ + "\5\26\f\2\u0121\u00f4\3\2\2\2\u0121\u00f7\3\2\2\2\u0121\u00fa\3\2\2\2"+ + "\u0121\u00fd\3\2\2\2\u0121\u0100\3\2\2\2\u0121\u0103\3\2\2\2\u0121\u0106"+ + "\3\2\2\2\u0121\u0109\3\2\2\2\u0121\u010c\3\2\2\2\u0121\u010f\3\2\2\2\u0121"+ + "\u0112\3\2\2\2\u0121\u0115\3\2\2\2\u0121\u011b\3\2\2\2\u0121\u011e\3\2"+ + "\2\2\u0122\u0125\3\2\2\2\u0123\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124"+ + "\37\3\2\2\2\u0125\u0123\3\2\2\2\u0126\u0127\t\n\2\2\u0127\u0134\5\"\22"+ + "\2\u0128\u0129\5\"\22\2\u0129\u012a\t\n\2\2\u012a\u0134\3\2\2\2\u012b"+ + "\u0134\5\"\22\2\u012c\u012d\t\13\2\2\u012d\u0134\5 \21\2\u012e\u012f\7"+ + "\t\2\2\u012f\u0130\5\26\f\2\u0130\u0131\7\n\2\2\u0131\u0132\5 \21\2\u0132"+ + "\u0134\3\2\2\2\u0133\u0126\3\2\2\2\u0133\u0128\3\2\2\2\u0133\u012b\3\2"+ + "\2\2\u0133\u012c\3\2\2\2\u0133\u012e\3\2\2\2\u0134!\3\2\2\2\u0135\u0139"+ + "\5$\23\2\u0136\u0138\5&\24\2\u0137\u0136\3\2\2\2\u0138\u013b\3\2\2\2\u0139"+ + "\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u0146\3\2\2\2\u013b\u0139\3\2"+ + 
"\2\2\u013c\u013d\5\26\f\2\u013d\u0141\5(\25\2\u013e\u0140\5&\24\2\u013f"+ + "\u013e\3\2\2\2\u0140\u0143\3\2\2\2\u0141\u013f\3\2\2\2\u0141\u0142\3\2"+ + "\2\2\u0142\u0146\3\2\2\2\u0143\u0141\3\2\2\2\u0144\u0146\5\60\31\2\u0145"+ + "\u0135\3\2\2\2\u0145\u013c\3\2\2\2\u0145\u0144\3\2\2\2\u0146#\3\2\2\2"+ + "\u0147\u0148\7\t\2\2\u0148\u0149\5\36\20\2\u0149\u014a\7\n\2\2\u014a\u015a"+ + "\3\2\2\2\u014b\u015a\t\f\2\2\u014c\u015a\7N\2\2\u014d\u015a\7O\2\2\u014e"+ + "\u015a\7P\2\2\u014f\u015a\7L\2\2\u0150\u015a\7M\2\2\u0151\u015a\5\62\32"+ + "\2\u0152\u015a\5\64\33\2\u0153\u015a\7R\2\2\u0154\u0155\7R\2\2\u0155\u015a"+ + "\58\35\2\u0156\u0157\7\27\2\2\u0157\u0158\7Q\2\2\u0158\u015a\58\35\2\u0159"+ + "\u0147\3\2\2\2\u0159\u014b\3\2\2\2\u0159\u014c\3\2\2\2\u0159\u014d\3\2"+ + "\2\2\u0159\u014e\3\2\2\2\u0159\u014f\3\2\2\2\u0159\u0150\3\2\2\2\u0159"+ + "\u0151\3\2\2\2\u0159\u0152\3\2\2\2\u0159\u0153\3\2\2\2\u0159\u0154\3\2"+ + "\2\2\u0159\u0156\3\2\2\2\u015a%\3\2\2\2\u015b\u015f\5*\26\2\u015c\u015f"+ + "\5,\27\2\u015d\u015f\5.\30\2\u015e\u015b\3\2\2\2\u015e\u015c\3\2\2\2\u015e"+ + "\u015d\3\2\2\2\u015f\'\3\2\2\2\u0160\u0163\5*\26\2\u0161\u0163\5,\27\2"+ + "\u0162\u0160\3\2\2\2\u0162\u0161\3\2\2\2\u0163)\3\2\2\2\u0164\u0166\7"+ + "\64\2\2\u0165\u0164\3\2\2\2\u0165\u0166\3\2\2\2\u0166\u0167\3\2\2\2\u0167"+ + "\u0168\7\13\2\2\u0168\u0169\7T\2\2\u0169\u016a\58\35\2\u016a+\3\2\2\2"+ + "\u016b\u016d\7\64\2\2\u016c\u016b\3\2\2\2\u016c\u016d\3\2\2\2\u016d\u016e"+ + "\3\2\2\2\u016e\u016f\7\13\2\2\u016f\u0170\t\r\2\2\u0170-\3\2\2\2\u0171"+ + "\u0172\7\7\2\2\u0172\u0173\5\36\20\2\u0173\u0174\7\b\2\2\u0174/\3\2\2"+ + "\2\u0175\u0176\7\27\2\2\u0176\u017b\7Q\2\2\u0177\u0178\7\7\2\2\u0178\u0179"+ + "\5\36\20\2\u0179\u017a\7\b\2\2\u017a\u017c\3\2\2\2\u017b\u0177\3\2\2\2"+ + "\u017c\u017d\3\2\2\2\u017d\u017b\3\2\2\2\u017d\u017e\3\2\2\2\u017e\u0186"+ + "\3\2\2\2\u017f\u0183\5(\25\2\u0180\u0182\5&\24\2\u0181\u0180\3\2\2\2\u0182"+ + 
"\u0185\3\2\2\2\u0183\u0181\3\2\2\2\u0183\u0184\3\2\2\2\u0184\u0187\3\2"+ + "\2\2\u0185\u0183\3\2\2\2\u0186\u017f\3\2\2\2\u0186\u0187\3\2\2\2\u0187"+ + "\u01a2\3\2\2\2\u0188\u0189\7\27\2\2\u0189\u018a\7Q\2\2\u018a\u018b\7\7"+ + "\2\2\u018b\u018c\7\b\2\2\u018c\u0195\7\5\2\2\u018d\u0192\5\36\20\2\u018e"+ + "\u018f\7\f\2\2\u018f\u0191\5\36\20\2\u0190\u018e\3\2\2\2\u0191\u0194\3"+ + "\2\2\2\u0192\u0190\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0196\3\2\2\2\u0194"+ + "\u0192\3\2\2\2\u0195\u018d\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0198\3\2"+ + "\2\2\u0197\u0199\7\r\2\2\u0198\u0197\3\2\2\2\u0198\u0199\3\2\2\2\u0199"+ + "\u019a\3\2\2\2\u019a\u019e\7\6\2\2\u019b\u019d\5&\24\2\u019c\u019b\3\2"+ + "\2\2\u019d\u01a0\3\2\2\2\u019e\u019c\3\2\2\2\u019e\u019f\3\2\2\2\u019f"+ + "\u01a2\3\2\2\2\u01a0\u019e\3\2\2\2\u01a1\u0175\3\2\2\2\u01a1\u0188\3\2"+ + "\2\2\u01a2\61\3\2\2\2\u01a3\u01a4\7\7\2\2\u01a4\u01a9\5\36\20\2\u01a5"+ + "\u01a6\7\f\2\2\u01a6\u01a8\5\36\20\2\u01a7\u01a5\3\2\2\2\u01a8\u01ab\3"+ + "\2\2\2\u01a9\u01a7\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa\u01ac\3\2\2\2\u01ab"+ + "\u01a9\3\2\2\2\u01ac\u01ad\7\b\2\2\u01ad\u01b1\3\2\2\2\u01ae\u01af\7\7"+ + "\2\2\u01af\u01b1\7\b\2\2\u01b0\u01a3\3\2\2\2\u01b0\u01ae\3\2\2\2\u01b1"+ + "\63\3\2\2\2\u01b2\u01b3\7\7\2\2\u01b3\u01b8\5\66\34\2\u01b4\u01b5\7\f"+ + "\2\2\u01b5\u01b7\5\66\34\2\u01b6\u01b4\3\2\2\2\u01b7\u01ba\3\2\2\2\u01b8"+ + "\u01b6\3\2\2\2\u01b8\u01b9\3\2\2\2\u01b9\u01bb\3\2\2\2\u01ba\u01b8\3\2"+ + "\2\2\u01bb\u01bc\7\b\2\2\u01bc\u01c1\3\2\2\2\u01bd\u01be\7\7\2\2\u01be"+ + "\u01bf\7\65\2\2\u01bf\u01c1\7\b\2\2\u01c0\u01b2\3\2\2\2\u01c0\u01bd\3"+ + "\2\2\2\u01c1\65\3\2\2\2\u01c2\u01c3\5\36\20\2\u01c3\u01c4\7\65\2\2\u01c4"+ + "\u01c5\5\36\20\2\u01c5\67\3\2\2\2\u01c6\u01cf\7\t\2\2\u01c7\u01cc\5:\36"+ + "\2\u01c8\u01c9\7\f\2\2\u01c9\u01cb\5:\36\2\u01ca\u01c8\3\2\2\2\u01cb\u01ce"+ + "\3\2\2\2\u01cc\u01ca\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd\u01d0\3\2\2\2\u01ce"+ + "\u01cc\3\2\2\2\u01cf\u01c7\3\2\2\2\u01cf\u01d0\3\2\2\2\u01d0\u01d1\3\2"+ + 
"\2\2\u01d1\u01d2\7\n\2\2\u01d29\3\2\2\2\u01d3\u01d7\5\36\20\2\u01d4\u01d7"+ + "\5<\37\2\u01d5\u01d7\5@!\2\u01d6\u01d3\3\2\2\2\u01d6\u01d4\3\2\2\2\u01d6"+ + "\u01d5\3\2\2\2\u01d7;\3\2\2\2\u01d8\u01e6\5> \2\u01d9\u01e2\7\t\2\2\u01da"+ + "\u01df\5> \2\u01db\u01dc\7\f\2\2\u01dc\u01de\5> \2\u01dd\u01db\3\2\2\2"+ + "\u01de\u01e1\3\2\2\2\u01df\u01dd\3\2\2\2\u01df\u01e0\3\2\2\2\u01e0\u01e3"+ + "\3\2\2\2\u01e1\u01df\3\2\2\2\u01e2\u01da\3\2\2\2\u01e2\u01e3\3\2\2\2\u01e3"+ + "\u01e4\3\2\2\2\u01e4\u01e6\7\n\2\2\u01e5\u01d8\3\2\2\2\u01e5\u01d9\3\2"+ + "\2\2\u01e6\u01e7\3\2\2\2\u01e7\u01ea\7\67\2\2\u01e8\u01eb\5\f\7\2\u01e9"+ + "\u01eb\5\36\20\2\u01ea\u01e8\3\2\2\2\u01ea\u01e9\3\2\2\2\u01eb=\3\2\2"+ + "\2\u01ec\u01ee\5\26\f\2\u01ed\u01ec\3\2\2\2\u01ed\u01ee\3\2\2\2\u01ee"+ + "\u01ef\3\2\2\2\u01ef\u01f0\7R\2\2\u01f0?\3\2\2\2\u01f1\u01f2\7Q\2\2\u01f2"+ + "\u01f3\7\66\2\2\u01f3\u01ff\7R\2\2\u01f4\u01f5\5\26\f\2\u01f5\u01f6\7"+ + "\66\2\2\u01f6\u01f7\7\27\2\2\u01f7\u01ff\3\2\2\2\u01f8\u01f9\7R\2\2\u01f9"+ + "\u01fa\7\66\2\2\u01fa\u01ff\7R\2\2\u01fb\u01fc\7\33\2\2\u01fc\u01fd\7"+ + "\66\2\2\u01fd\u01ff\7R\2\2\u01fe\u01f1\3\2\2\2\u01fe\u01f4\3\2\2\2\u01fe"+ + "\u01f8\3\2\2\2\u01fe\u01fb\3\2\2\2\u01ffA\3\2\2\2\64EK^amu\u0082\u0086"+ + "\u008a\u008f\u00b2\u00bb\u00bf\u00c5\u00ce\u00d8\u00e0\u00e6\u0121\u0123"+ + "\u0133\u0139\u0141\u0145\u0159\u015e\u0162\u0165\u016c\u017d\u0183\u0186"+ + "\u0192\u0195\u0198\u019e\u01a1\u01a9\u01b0\u01b8\u01c0\u01cc\u01cf\u01d6"+ + "\u01df\u01e2\u01e5\u01ea\u01ed\u01fe"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java index 4f22137e5fb..3ac45705d55 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java @@ -27,7 +27,7 @@ import org.antlr.v4.runtime.TokenSource; import org.antlr.v4.runtime.misc.Pair; /** - * Token factory that preseres that last non-whitespace token so you can do token level lookbehind in the lexer. + * Token factory that preserves that last non-whitespace token so you can do token level lookbehind in the lexer. */ public class StashingTokenFactory implements TokenFactory { private final TokenFactory delegate; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index da430f4280a..5659afc75ea 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -898,7 +898,7 @@ public final class Walker extends PainlessParserBaseVisitor { String name = ctx.DOTID().getText(); List arguments = collectArguments(ctx.arguments()); - return new PCallInvoke(location(ctx), prefix, name, arguments); + return new PCallInvoke(location(ctx), prefix, name, ctx.COND() != null, arguments); } @Override @@ -917,7 +917,7 @@ public final class Walker extends PainlessParserBaseVisitor { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } - return new PField(location(ctx), prefix, value); + return new PField(location(ctx), prefix, ctx.COND() != null, value); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java index 1f9fe8bdfcb..54a4aceb734 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java @@ -34,7 +34,7 @@ import 
org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Opcodes; /** - * Respresents a conditional expression. + * Represents a conditional expression. */ public final class EConditional extends AExpression { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java index 643861477e7..437b93d2770 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java @@ -58,8 +58,12 @@ public final class EDecimal extends AExpression { throw createError(new IllegalArgumentException("Invalid float constant [" + value + "].")); } } else { + String toParse = value; + if (toParse.endsWith("d") || value.endsWith("D")) { + toParse = toParse.substring(0, value.length() - 1); + } try { - constant = Double.parseDouble(value); + constant = Double.parseDouble(toParse); actual = Definition.DOUBLE_TYPE; } catch (NumberFormatException exception) { throw createError(new IllegalArgumentException("Invalid double constant [" + value + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java index 102a8491226..80436c7b34e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java @@ -104,6 +104,14 @@ public final class ENumeric extends AExpression { actual = Definition.INT_TYPE; } } catch (NumberFormatException exception) { + try { + // Check if we can parse as a long. If so then hint that the user might prefer that. + Long.parseLong(value, radix); + throw createError(new IllegalArgumentException("Invalid int constant [" + value + "]. 
If you want a long constant " + + "then change it to [" + value + "L].")); + } catch (NumberFormatException longNoGood) { + // Ignored + } throw createError(new IllegalArgumentException("Invalid int constant [" + value + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index e8cfb1eba2e..9d405a7b00a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -39,14 +39,16 @@ import java.util.Set; public final class PCallInvoke extends AExpression { private final String name; + private final boolean nullSafe; private final List arguments; private AExpression sub = null; - public PCallInvoke(Location location, AExpression prefix, String name, List arguments) { + public PCallInvoke(Location location, AExpression prefix, String name, boolean nullSafe, List arguments) { super(location, prefix); this.name = Objects.requireNonNull(name); + this.nullSafe = nullSafe; this.arguments = Objects.requireNonNull(arguments); } @@ -87,6 +89,10 @@ public final class PCallInvoke extends AExpression { "Unknown call [" + name + "] with [" + arguments.size() + "] arguments on type [" + struct.name + "].")); } + if (nullSafe) { + sub = new PSubNullSafeCallInvoke(location, sub); + } + sub.expected = expected; sub.explicit = explicit; sub.analyze(locals); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 21a3def3189..ea23d3cdd07 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -40,13 +40,15 @@ import java.util.Set; */ public final class PField extends 
AStoreable { + private final boolean nullSafe; private final String value; private AStoreable sub = null; - public PField(Location location, AExpression prefix, String value) { + public PField(Location location, AExpression prefix, boolean nullSafe, String value) { super(location, prefix); + this.nullSafe = nullSafe; this.value = Objects.requireNonNull(value); } @@ -106,6 +108,10 @@ public final class PField extends AStoreable { throw createError(new IllegalArgumentException("Unknown field [" + value + "] for type [" + prefix.actual.name + "].")); } + if (nullSafe) { + sub = new PSubNullSafeField(location, sub); + } + sub.write = write; sub.read = read; sub.expected = expected; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java new file mode 100644 index 00000000000..51349949dbd --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Label; + +import java.util.Set; + +import static java.util.Objects.requireNonNull; + +/** + * Implements a call who's value is null if the prefix is null rather than throwing an NPE. + */ +public class PSubNullSafeCallInvoke extends AExpression { + /** + * The expression gaurded by the null check. Required at construction time and replaced at analysis time. + */ + private AExpression guarded; + + public PSubNullSafeCallInvoke(Location location, AExpression guarded) { + super(location); + this.guarded = requireNonNull(guarded); + } + + @Override + void extractVariables(Set variables) { + guarded.extractVariables(variables); + } + + @Override + void analyze(Locals locals) { + guarded.analyze(locals); + actual = guarded.actual; + if (actual.sort.primitive) { + // Result must be nullable. We emit boxing instructions if needed. 
+ actual = Definition.getType(actual.sort.boxed.getSimpleName()); + } + } + + @Override + void write(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + Label end = new Label(); + writer.dup(); + writer.ifNull(end); + guarded.write(writer, globals); + if (guarded.actual.sort.primitive) { + // Box primitives so they are nullable + writer.box(guarded.actual.type); + } + writer.mark(end); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java new file mode 100644 index 00000000000..32ad6c0cb62 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Label; + +import java.util.Set; + +/** + * Implements a field whose value is null if the prefix is null rather than throwing an NPE. + */ +public class PSubNullSafeField extends AStoreable { + private AStoreable guarded; + + public PSubNullSafeField(Location location, AStoreable guarded) { + super(location); + this.guarded = guarded; + } + + @Override + void extractVariables(Set variables) { + guarded.extractVariables(variables); + } + + @Override + void analyze(Locals locals) { + if (write) { + throw createError(new IllegalArgumentException("Can't write to null safe reference")); + } + guarded.read = read; + guarded.analyze(locals); + actual = guarded.actual; + if (actual.sort.primitive) { + // Result must be nullable. We emit boxing instructions if needed. 
+ actual = Definition.getType(actual.sort.boxed.getSimpleName()); + } + } + + + @Override + int accessElementCount() { + return guarded.accessElementCount(); + } + + @Override + boolean isDefOptimized() { + return guarded.isDefOptimized(); + } + + @Override + void updateActual(Type actual) { + guarded.updateActual(actual); + } + + @Override + void write(MethodWriter writer, Globals globals) { + Label end = new Label(); + writer.dup(); + writer.ifNull(end); + guarded.write(writer, globals); + if (guarded.actual.sort.primitive) { + // Box primitives so they are nullable + writer.box(guarded.actual.type); + } + writer.mark(end); + } + + @Override + void setup(MethodWriter writer, Globals globals) { + throw createError(new IllegalArgumentException("Can't write to null safe field")); + } + + @Override + void load(MethodWriter writer, Globals globals) { + throw createError(new IllegalArgumentException("Can't write to null safe field")); + } + + @Override + void store(MethodWriter writer, Globals globals) { + throw createError(new IllegalArgumentException("Can't write to null safe field")); + } +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java index cbfdd31b143..f35ffc77f7b 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java @@ -2,6 +2,8 @@ package org.elasticsearch.painless; import java.util.Collections; +import static java.util.Collections.singletonMap; + /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -25,16 +27,24 @@ public class BasicExpressionTests extends ScriptTestCase { /** simple tests returning a constant value */ public void testReturnConstant() { - assertEquals(5, exec("return 5;")); - assertEquals(7L, exec("return 7L;")); - assertEquals(7.0, exec("return 7.0;")); - assertEquals(32.0F, exec("return 32.0F;")); - assertEquals((byte)255, exec("return (byte)255;")); - assertEquals((short)5, exec("return (short)5;")); - assertEquals("string", exec("return \"string\";")); - assertEquals(true, exec("return true;")); - assertEquals(false, exec("return false;")); - assertNull(exec("return null;")); + assertEquals(5, exec("return 5")); + assertEquals(6L, exec("return 6l")); + assertEquals(7L, exec("return 7L")); + assertEquals(7.0d, exec("return 7.0")); + assertEquals(18.0d, exec("return 18d")); + assertEquals(19.0d, exec("return 19.0d")); + assertEquals(20.0d, exec("return 20D")); + assertEquals(21.0d, exec("return 21.0D")); + assertEquals(32.0F, exec("return 32.0f")); + assertEquals(33.0F, exec("return 33f")); + assertEquals(34.0F, exec("return 34.0F")); + assertEquals(35.0F, exec("return 35F")); + assertEquals((byte)255, exec("return (byte)255")); + assertEquals((short)5, exec("return (short)5")); + assertEquals("string", exec("return \"string\"")); + assertEquals(true, exec("return true")); + assertEquals(false, exec("return false")); + assertNull(exec("return null")); } public void testReturnConstantChar() { @@ -135,4 +145,73 @@ public class BasicExpressionTests extends ScriptTestCase { assertEquals(2, exec("int x = 5; return (x+x)/x;")); assertEquals(true, exec("boolean t = true, f = false; return t && (f || t);")); } + + public void testNullSafeDeref() { + // Objects in general + assertNull( exec("String a = null; return a?.toString()")); // Call + assertNull( exec("String a = null; return a?.length()")); // Call and box + assertEquals("foo", exec("String a = 'foo'; return a?.toString()")); // Call + 
assertEquals(Integer.valueOf(3), exec("String a = 'foo'; return a?.length()")); // Call and box + + // Maps + assertNull( exec("Map a = null; return a?.toString()")); // Call + assertNull( exec("Map a = null; return a?.size()")); // Call and box + assertNull( exec("Map a = null; return a?.other")); // Read shortcut + assertEquals("{}", exec("Map a = [:]; return a?.toString()")); // Call + assertEquals(0, exec("Map a = [:]; return a?.size()")); // Call and box + assertEquals(1, exec("Map a = ['other':1]; return a?.other")); // Read shortcut + + // Array + // Since you can't invoke methods on arrays we skip the toString and hashCode tests + assertNull( exec("int[] a = null; return a?.length")); // Length (boxed) + assertEquals(2, exec("int[] a = new int[] {2, 3}; return a?.length")); // Length (boxed) + + // Def + assertNull( exec("def a = null; return a?.getX()")); // Invoke + assertNull( exec("def a = null; return a?.x")); // Read shortcut + assertEquals(0, exec("def a = new org.elasticsearch.painless.FeatureTest(); return a?.getX()")); // Invoke + assertEquals(0, exec("def a = new org.elasticsearch.painless.FeatureTest(); return a?.x")); // Read shortcut + + // Results from maps (should just work but let's test anyway) + FeatureTest t = new FeatureTest(); + assertNull( exec("Map a = ['thing': params.t]; return a.other?.getX()", singletonMap("t", t), true)); + assertNull( exec("Map a = ['thing': params.t]; return a.other?.x", singletonMap("t", t), true)); + assertNull( exec("def a = ['thing': params.t]; return a.other?.getX()", singletonMap("t", t), true)); + assertNull( exec("def a = ['thing': params.t]; return a.other?.x", singletonMap("t", t), true)); + assertEquals(0, exec("Map a = ['other': params.t]; return a.other?.getX()", singletonMap("t", t), true)); + assertEquals(0, exec("Map a = ['other': params.t]; return a.other?.x", singletonMap("t", t), true)); + assertEquals(0, exec("def a = ['other': params.t]; return a.other?.getX()", singletonMap("t", t), 
true)); + assertEquals(0, exec("def a = ['other': params.t]; return a.other?.x", singletonMap("t", t), true)); + + // Chains + assertNull( exec("Map a = ['thing': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); + assertNull( exec("Map a = ['thing': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); + assertNull( exec("def a = ['thing': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); + assertNull( exec("def a = ['thing': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); + assertEquals(0, exec("Map a = ['other': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); + assertEquals(0, exec("Map a = ['other': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); + assertEquals(0, exec("def a = ['other': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); + assertEquals(0, exec("def a = ['other': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); + + // Check that we don't try to cast when the LHS doesn't provide an "expected" value + assertNull(exec( + "def a = [:];\n" + + "a.missing_length = a.missing?.length();\n" + + "return a.missing_length", true)); + assertEquals(3, exec( + "def a = [:];\n" + + "a.missing = 'foo';\n" + + "a.missing_length = a.missing?.length();\n" + + "return a.missing_length", true)); + + // Writes, all unsupported at this point +// assertEquals(null, exec("org.elasticsearch.painless.FeatureTest a = null; return a?.x")); // Read field +// assertEquals(null, exec("org.elasticsearch.painless.FeatureTest a = null; a?.x = 7; return a?.x")); // Write field +// assertEquals(null, exec("Map a = null; a?.other = 'wow'; return a?.other")); // Write shortcut +// assertEquals(null, exec("def a = null; a?.other = 'cat'; return a?.other")); // Write shortcut +// assertEquals(null, exec("Map a = ['thing': 'bar']; a.other?.cat = 'no'; return 
a.other?.cat")); +// assertEquals(null, exec("def a = ['thing': 'bar']; a.other?.cat = 'no'; return a.other?.cat")); +// assertEquals(null, exec("Map a = ['thing': 'bar']; a.other?.cat?.dog = 'wombat'; return a.other?.cat?.dog")); +// assertEquals(null, exec("def a = ['thing': 'bar']; a.other?.cat?.dog = 'wombat'; return a.other?.cat?.dog")); + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index 7e4311f24ec..664394a6d81 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java @@ -245,4 +245,20 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { () -> exec("", null, singletonMap(CompilerSettings.REGEX_ENABLED.getKey(), "true"), null, false)); assertEquals("[painless.regex.enabled] can only be set on node startup.", e.getMessage()); } + + public void testInvalidIntConstantSuggestsLong() { + IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return 864000000000")); + assertEquals("Invalid int constant [864000000000]. If you want a long constant then change it to [864000000000L].", e.getMessage()); + assertEquals(864000000000L, exec("return 864000000000L")); + e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return -864000000000")); + assertEquals("Invalid int constant [-864000000000]. 
If you want a long constant then change it to [-864000000000L].", + e.getMessage()); + assertEquals(-864000000000L, exec("return -864000000000L")); + + // If it isn't a valid long we don't give any suggestions + e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return 92233720368547758070")); + assertEquals("Invalid int constant [92233720368547758070].", e.getMessage()); + e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return -92233720368547758070")); + assertEquals("Invalid int constant [-92233720368547758070].", e.getMessage()); + } } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/15_update.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/15_update.yaml index a031cea86e5..9da3761d1b1 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/15_update.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/15_update.yaml @@ -59,6 +59,59 @@ - match: { _source.foo: yyy } - match: { _source.count: 1 } + - do: + update: + index: test_1 + type: test + id: 1 + body: + script: + lang: painless + inline: "ctx._source.missing_length = ctx._source.missing?.length()" + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: "1" } + - match: { _version: 4 } + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: yyy } + - match: { _source.count: 1 } + - is_false: _source.missing + - is_false: _source.missing_length + + - do: + update: + index: test_1 + type: test + id: 1 + body: + script: + lang: painless + inline: "ctx._source.foo_length = ctx._source.foo?.length()" + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: "1" } + - match: { _version: 5 } + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: yyy } + - match: { _source.foo_length: 3 } + - match: { _source.count: 1 } + - is_false: _source.missing + - is_false: _source.missing_length 
+ --- "Update Script with script error": - do: diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml index cf2e9eb4133..902c6950245 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml @@ -10,6 +10,8 @@ setup: properties: foo: type: keyword + missing: + type: keyword - do: index: index: test @@ -26,14 +28,42 @@ setup: body: script_fields: bar: - script: + script: inline: "doc['foo'].value + params.x;" - lang: painless params: x: "bbb" - match: { hits.hits.0.fields.bar.0: "aaabbb"} +--- +"Scripted Field with a null safe dereference (non-null)": + - do: + search: + body: + script_fields: + bar: + script: + inline: "doc['foo'].value?.length() + params.x;" + params: + x: 5 + + - match: { hits.hits.0.fields.bar.0: 8} + +--- +"Scripted Field with a null safe dereference (null)": + # Change this to ?: once we have it implemented + - do: + search: + body: + script_fields: + bar: + script: + inline: "(doc['missing'].value?.length() == null ? 
0 : doc['missing'].value?.length()) + params.x;" + params: + x: 5 + + - match: { hits.hits.0.fields.bar.0: 5} + --- "Scripted Field with script error": - do: @@ -43,7 +73,6 @@ setup: script_fields: bar: script: - lang: painless inline: "while (true) {}" - match: { error.root_cause.0.type: "script_exception" } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequest.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequest.java index d8bb91f2fb0..3ecf3c273ea 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequest.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequest.java @@ -48,7 +48,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeSt * @deprecated Instead use multi search API with {@link PercolateQueryBuilder} */ @Deprecated -public class MultiPercolateRequest extends ActionRequest implements CompositeIndicesRequest { +public class MultiPercolateRequest extends ActionRequest implements CompositeIndicesRequest { private String[] indices; private String documentType; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java index 98aaa891640..bc449ea932d 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java @@ -44,7 +44,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * @deprecated Instead use search API with {@link PercolateQueryBuilder} */ @Deprecated -public class PercolateRequest extends ActionRequest implements IndicesRequest.Replaceable { +public class PercolateRequest extends ActionRequest implements IndicesRequest.Replaceable { protected String[] indices; private IndicesOptions 
indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 8fcf40d8dcc..dd148e8d6d7 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -69,6 +69,7 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.function.Supplier; public class PercolatorFieldMapper extends FieldMapper { @@ -89,9 +90,9 @@ public class PercolatorFieldMapper extends FieldMapper { public static class Builder extends FieldMapper.Builder { - private final QueryShardContext queryShardContext; + private final Supplier queryShardContext; - public Builder(String fieldName, QueryShardContext queryShardContext) { + public Builder(String fieldName, Supplier queryShardContext) { super(fieldName, FIELD_TYPE, FIELD_TYPE); this.queryShardContext = queryShardContext; } @@ -136,7 +137,7 @@ public class PercolatorFieldMapper extends FieldMapper { @Override public Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - return new Builder(name, parserContext.queryShardContext()); + return new Builder(name, parserContext.queryShardContextSupplier()); } } @@ -222,23 +223,44 @@ public class PercolatorFieldMapper extends FieldMapper { } private final boolean mapUnmappedFieldAsString; - private final QueryShardContext queryShardContext; + private final Supplier queryShardContext; private KeywordFieldMapper queryTermsField; private KeywordFieldMapper extractionResultField; private BinaryFieldMapper queryBuilderField; public PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Settings indexSettings, 
MultiFields multiFields, CopyTo copyTo, QueryShardContext queryShardContext, + Settings indexSettings, MultiFields multiFields, CopyTo copyTo, + Supplier queryShardContext, KeywordFieldMapper queryTermsField, KeywordFieldMapper extractionResultField, BinaryFieldMapper queryBuilderField) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - this.queryShardContext = queryShardContext; + this.queryShardContext = new QueryShardContextSupplierCache(queryShardContext); this.queryTermsField = queryTermsField; this.extractionResultField = extractionResultField; this.queryBuilderField = queryBuilderField; this.mapUnmappedFieldAsString = INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); } + private static class QueryShardContextSupplierCache implements Supplier { + private final Supplier supplier; + private volatile QueryShardContext context; + + QueryShardContextSupplierCache(Supplier supplier) { + this.supplier = supplier; + } + + @Override + public QueryShardContext get() { + QueryShardContext context = this.context; + if (context == null) { + context = this.context = supplier.get(); + } + // return a copy + return new QueryShardContext(context); + } + + } + @Override public FieldMapper updateFieldType(Map fullNameToFieldType) { PercolatorFieldMapper updated = (PercolatorFieldMapper) super.updateFieldType(fullNameToFieldType); @@ -261,7 +283,7 @@ public class PercolatorFieldMapper extends FieldMapper { @Override public Mapper parse(ParseContext context) throws IOException { - QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext); + QueryShardContext queryShardContext = this.queryShardContext.get(); if (context.doc().getField(queryBuilderField.name()) != null) { // If a percolator query has been defined in an array object then multiple percolator queries // could be provided. 
In order to prevent this we fail if we try to parse more than one query diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java index 8b602e3c478..d314de3b05f 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java @@ -47,7 +47,7 @@ public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlug } @Override - public List, ? extends ActionResponse>> getActions() { + public List> getActions() { return Arrays.asList(new ActionHandler<>(PercolateAction.INSTANCE, TransportPercolateAction.class), new ActionHandler<>(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class)); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index d0ba6afbdab..665b9926a58 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -42,6 +42,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.function.Function; @@ -89,7 +90,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { ensureGreen(); client().prepareIndex("index", "type", "1") .setSource(jsonBuilder().startObject().field("query", QueryBuilders.scriptQuery( - new Script("1==1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))).endObject()) + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "1==1", Collections.emptyMap()))).endObject()) 
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .execute().actionGet(); PercolateResponse response = preparePercolate(client()) diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java index d4d6a3a22ea..32e1dbb8486 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java @@ -34,7 +34,7 @@ import java.util.List; public class RankEvalPlugin extends Plugin implements ActionPlugin { @Override - public List, ? extends ActionResponse>> getActions() { + public List> getActions() { return Arrays.asList(new ActionHandler<>(RankEvalAction.INSTANCE, TransportRankEvalAction.class)); } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java index 8dbbef3b497..2f1cdf44f86 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java @@ -31,14 +31,19 @@ import java.io.IOException; * Instances of this class represent a complete precision at request. They encode a precision task including search intents and search * specifications to be executed subsequently. * */ -public class RankEvalRequest extends ActionRequest { +public class RankEvalRequest extends ActionRequest { /** The request data to use for evaluation. 
*/ private RankEvalSpec task; @Override public ActionRequestValidationException validate() { - return null; // TODO + ActionRequestValidationException e = null; + if (task == null) { + e = new ActionRequestValidationException(); + e.addValidationError("missing ranking evaluation specification"); + } + return e; } /** diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java index 4e7f28ac901..bae54425017 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java @@ -39,8 +39,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; -import java.util.Objects; import java.util.Map; +import java.util.Objects; /** * This class defines a ranking evaluation task including an id, a collection of queries to evaluate and the evaluation metric. @@ -113,12 +113,12 @@ public class RankEvalSpec extends ToXContentToBytes implements Writeable { public void setSpecifications(Collection specifications) { this.ratedRequests = specifications; } - + /** Set the template to base test requests on. */ public void setTemplate(Script script) { this.template = script; } - + /** Returns the template to base test requests on. 
*/ public Script getTemplate() { return this.template; @@ -138,7 +138,11 @@ public class RankEvalSpec extends ToXContentToBytes implements Writeable { } } , METRIC_FIELD); PARSER.declareObject(RankEvalSpec::setTemplate, (p, c) -> { + try { return Script.parse(p, c.getParseFieldMatcher(), "mustache"); + } catch (IOException ex) { + throw new ParsingException(p.getTokenLocation(), "error parsing rank request", ex); + } }, TEMPLATE_FIELD); PARSER.declareObjectArray(RankEvalSpec::setSpecifications, (p, c) -> { try { @@ -154,30 +158,20 @@ public class RankEvalSpec extends ToXContentToBytes implements Writeable { if (templated) { for (RatedRequest query_spec : spec.getSpecifications()) { - Map params = query_spec.getParams(); - Script scriptWithParams = new Script(spec.template.getScript(), spec.template.getType(), spec.template.getLang(), params); - String resolvedRequest = - ((BytesReference) - (context.getScriptService().executable(scriptWithParams, ScriptContext.Standard.SEARCH, params) - .run())) - .utf8ToString(); + Map params = query_spec.getParams(); + Script scriptWithParams = new Script(spec.template.getType(), spec.template.getLang(), spec.template.getIdOrCode(), params); + String resolvedRequest = ((BytesReference) (context.getScriptService() + .executable(scriptWithParams, ScriptContext.Standard.SEARCH).run())).utf8ToString(); try (XContentParser subParser = XContentFactory.xContent(resolvedRequest).createParser(resolvedRequest)) { - QueryParseContext parseContext = - new QueryParseContext( - context.getSearchRequestParsers().queryParsers, - subParser, - context.getParseFieldMatcher()); - SearchSourceBuilder templateResult = - SearchSourceBuilder.fromXContent( - parseContext, - context.getAggs(), - context.getSuggesters(), - context.getSearchExtParsers()); + QueryParseContext parseContext = new QueryParseContext(context.getSearchRequestParsers().queryParsers, subParser, + context.getParseFieldMatcher()); + SearchSourceBuilder templateResult = 
SearchSourceBuilder.fromXContent(parseContext, context.getAggs(), + context.getSuggesters(), context.getSearchExtParsers()); query_spec.setTestRequest(templateResult); } } } - return spec; + return spec; } @Override diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedRequest.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedRequest.java index 4ac76877816..6dc53244a6d 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedRequest.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedRequest.java @@ -35,11 +35,10 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Objects; -import java.util.Set; - import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; +import java.util.Set; /** * Defines a QA specification: All end user supplied query intents will be mapped to the search request specified in this search request @@ -57,7 +56,7 @@ public class RatedRequest extends ToXContentToBytes implements Writeable { /** Collection of rated queries for this query QA specification.*/ private List ratedDocs = new ArrayList<>(); /** Map of parameters to use for filling a query template, can be used instead of providing testRequest. 
*/ - private Map params = new HashMap<>(); + private Map params = new HashMap<>(); public RatedRequest() { // ctor that doesn't require all args to be present immediatly is easier to use with ObjectParser @@ -91,7 +90,7 @@ public class RatedRequest extends ToXContentToBytes implements Writeable { for (int i = 0; i < intentSize; i++) { ratedDocs.add(new RatedDocument(in)); } - this.params = (Map) in.readMap(); + this.params = in.readMap(); int summaryFieldsSize = in.readInt(); summaryFields = new ArrayList<>(summaryFieldsSize); for (int i = 0; i < summaryFieldsSize; i++) { @@ -115,7 +114,7 @@ public class RatedRequest extends ToXContentToBytes implements Writeable { for (RatedDocument ratedDoc : ratedDocs) { ratedDoc.writeTo(out); } - out.writeMap((Map) params); + out.writeMap(params); out.writeInt(summaryFields.size()); for (String fieldName : summaryFields) { out.writeString(fieldName); @@ -176,12 +175,12 @@ public class RatedRequest extends ToXContentToBytes implements Writeable { } this.ratedDocs = ratedDocs; } - - public void setParams(Map params) { + + public void setParams(Map params) { this.params = params; } - - public Map getParams() { + + public Map getParams() { return this.params; } @@ -259,7 +258,7 @@ public class RatedRequest extends ToXContentToBytes implements Writeable { if (testRequest != null) builder.field(REQUEST_FIELD.getPreferredName(), this.testRequest); builder.startObject(PARAMS_FIELD.getPreferredName()); - for (Entry entry : this.params.entrySet()) { + for (Entry entry : this.params.entrySet()) { builder.field(entry.getKey(), entry.getValue()); } builder.endObject(); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java index b7a6746d965..62155d8bcfd 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java +++ 
b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.Script; @@ -113,12 +112,7 @@ public class RankEvalSpecTests extends ESTestCase { script = randomAsciiOfLengthBetween(1, 5); } - testItem.setTemplate(new Script( - script, - scriptType, - randomFrom("_lang1", "_lang2", null), - params, - scriptType == ScriptType.INLINE ? XContentType.JSON : null)); + testItem.setTemplate(new Script(scriptType, randomFrom("_lang1", "_lang2", null), script, params)); } XContentBuilder shuffled = ESTestCase.shuffleXContent(testItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java index 19471c20d09..acd0c1ccb3e 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java @@ -38,8 +38,8 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -86,7 +86,7 @@ public class RatedRequestsTests extends ESTestCase { if (randomBoolean()) { - Map params = new HashMap(); + Map params = new HashMap<>(); int randomSize = randomIntBetween(1, 10); for (int i = 0; i < randomSize; i++) { 
params.put(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index fba0cb20f99..bfdd95cddfe 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -39,7 +39,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; public abstract class AbstractBulkByScrollRequest> - extends ActionRequest { + extends ActionRequest { public static final int SIZE_ALL_MATCHES = -1; private static final TimeValue DEFAULT_SCROLL_TIMEOUT = timeValueMinutes(5); private static final int DEFAULT_SCROLL_SIZE = 1000; @@ -424,7 +424,7 @@ public abstract class AbstractBulkByScrollRequest 1) { - throw new UnsupportedOperationException("Attempting to send sliced reindex-style request to a node that doesn't support " + throw new IllegalArgumentException("Attempting to send sliced reindex-style request to a node that doesn't support " + "it. 
Version is [" + out.getVersion() + "] but must be [" + BulkByScrollTask.V_5_1_0_UNRELEASED + "]"); } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 4d5f7623400..7f7ae52b73b 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -197,9 +197,9 @@ public class ClientScrollableHitSource extends ScrollableHitSource { private final SearchHit delegate; private final BytesReference source; - public ClientHit(SearchHit delegate) { + ClientHit(SearchHit delegate) { this.delegate = delegate; - source = delegate.hasSource() ? null : delegate.getSourceRef(); + source = delegate.hasSource() ? delegate.getSourceRef() : null; } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java index fe7bcb1f85b..707da4fe5da 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java @@ -37,7 +37,7 @@ public class ReindexPlugin extends Plugin implements ActionPlugin { public static final String NAME = "reindex"; @Override - public List, ? 
extends ActionResponse>> getActions() { + public List> getActions() { return Arrays.asList(new ActionHandler<>(ReindexAction.INSTANCE, TransportReindexAction.class), new ActionHandler<>(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class), new ActionHandler<>(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class), diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index be6f91002f5..8201f05f9ec 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -29,17 +29,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.script.Script; -import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchRequestParsers; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.function.Consumer; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.script.Script.DEFAULT_SCRIPT_LANG; public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { @@ -80,19 +81,19 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler config, ParseFieldMatcher parseFieldMatcher) { String script = null; ScriptType type = null; - String lang = null; - Map params = null; + String lang = DEFAULT_SCRIPT_LANG; + Map params = Collections.emptyMap(); for (Iterator> itr = config.entrySet().iterator(); itr.hasNext();) { Map.Entry entry = itr.next(); String parameterName = entry.getKey(); Object parameterValue = entry.getValue(); - if 
(parseFieldMatcher.match(parameterName, ScriptField.LANG)) { + if (parseFieldMatcher.match(parameterName, Script.LANG_PARSE_FIELD)) { if (parameterValue instanceof String || parameterValue == null) { lang = (String) parameterValue; } else { throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]"); } - } else if (parseFieldMatcher.match(parameterName, ScriptField.PARAMS)) { + } else if (parseFieldMatcher.match(parameterName, Script.PARAMS_PARSE_FIELD)) { if (parameterValue instanceof Map || parameterValue == null) { params = (Map) parameterValue; } else { @@ -127,6 +128,7 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler> T applyScript(Consumer> scriptBody) { + protected T applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); ExecutableScript executableScript = new SimpleExecutableScript(scriptBody); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 3640e2253e4..f78ab7514f3 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -726,7 +726,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { @Override @SuppressWarnings("unchecked") - protected , Response extends ActionResponse, + protected > void doExecute( Action action, Request request, ActionListener listener) { lastHeaders.set(threadPool.getThreadContext().getHeaders()); @@ -823,7 +823,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { } } - private static class RequestAndListener, Response> { + private static class RequestAndListener 
{ private final Request request; private final ActionListener listener; diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java index b4ac273b43b..27955f71f92 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -165,7 +165,7 @@ public class ReindexFromRemoteWithAuthTests extends ESSingleNodeTestCase { } @Override - public , Response extends ActionResponse> void apply(Task task, String action, + public void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { if (false == action.equals(SearchAction.NAME)) { chain.proceed(task, action, request, listener); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java index 3d90ff37646..afc08ed0587 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java @@ -29,9 +29,12 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; /** @@ -81,7 +84,26 @@ public class RethrottleTests extends ReindexTestCase { request.source().setSize(1); // Make sure we use multiple batches ListenableActionFuture responseListener = request.execute(); - TaskId 
taskToRethrottle = findTaskToRethrottle(actionName, request.request().getSlices()); + TaskGroup taskGroupToRethrottle = findTaskToRethrottle(actionName, request.request().getSlices()); + TaskId taskToRethrottle = taskGroupToRethrottle.getTaskInfo().getTaskId(); + + if (request.request().getSlices() == 1) { + assertThat(taskGroupToRethrottle.getChildTasks(), empty()); + } else { + // There should be a sane number of child tasks running + assertThat(taskGroupToRethrottle.getChildTasks(), + hasSize(allOf(greaterThanOrEqualTo(1), lessThanOrEqualTo(request.request().getSlices())))); + // Wait for all of the sub tasks to start (or finish, some might finish early, all that matters is that not all do) + assertBusy(() -> { + BulkByScrollTask.Status parent = (BulkByScrollTask.Status) client().admin().cluster().prepareGetTask(taskToRethrottle).get() + .getTask().getTask().getStatus(); + long finishedSubTasks = parent.getSliceStatuses().stream().filter(s -> s != null).count(); + ListTasksResponse list = client().admin().cluster().prepareListTasks().setParentTaskId(taskToRethrottle).get(); + list.rethrowFailures("subtasks"); + assertThat(finishedSubTasks + list.getTasks().size(), greaterThanOrEqualTo((long) request.request().getSlices())); + assertThat(list.getTasks().size(), greaterThan(0)); + }); + } // Now rethrottle it so it'll finish float newRequestsPerSecond = randomBoolean() ? 
Float.POSITIVE_INFINITY : between(1, 1000) * 100000; // No throttle or "very fast" @@ -134,26 +156,23 @@ public class RethrottleTests extends ReindexTestCase { response.getBatches(), greaterThanOrEqualTo(request.request().getSlices())); } - private TaskId findTaskToRethrottle(String actionName, int sliceCount) { - ListTasksResponse tasks; + private TaskGroup findTaskToRethrottle(String actionName, int sliceCount) { long start = System.nanoTime(); do { - tasks = client().admin().cluster().prepareListTasks().setActions(actionName).setDetailed(true).get(); + ListTasksResponse tasks = client().admin().cluster().prepareListTasks().setActions(actionName).setDetailed(true).get(); tasks.rethrowFailures("Finding tasks to rethrottle"); - for (TaskGroup taskGroup : tasks.getTaskGroups()) { - if (sliceCount == 1) { - assertThat(taskGroup.getChildTasks(), empty()); - } else { - if (taskGroup.getChildTasks().stream().noneMatch(t -> - ((BulkByScrollTask.Status) t.getTaskInfo().getStatus()).getTotal() > 0)) { - // Need to wait until a child is running that is non-empty so we can rethrottle it - continue; - } - assertThat(taskGroup.getChildTasks(), hasSize(lessThanOrEqualTo(sliceCount))); - } - return taskGroup.getTaskInfo().getTaskId(); + assertThat(tasks.getTaskGroups(), hasSize(lessThan(2))); + if (0 == tasks.getTaskGroups().size()) { + continue; } + TaskGroup taskGroup = tasks.getTaskGroups().get(0); + if (sliceCount != 1 && taskGroup.getChildTasks().size() == 0) { + // If there are child tasks wait for at least one to start + continue; + } + return taskGroup; } while (System.nanoTime() - start < TimeUnit.SECONDS.toNanos(10)); - throw new AssertionError("Couldn't find task to rethrottle after waiting tasks=" + tasks.getTasks()); + throw new AssertionError("Couldn't find tasks to rethrottle. 
Here are the running tasks " + + client().admin().cluster().prepareListTasks().get()); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 97e228ccfd4..38bc20c4891 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -81,7 +82,7 @@ public class RoundTripTests extends ESTestCase { // Try slices with a version that doesn't support slices. That should fail. reindex.setSlices(between(2, 1000)); - Exception e = expectThrows(UnsupportedOperationException.class, () -> roundTrip(Version.V_5_0_0_rc1, reindex, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_5_0_0_rc1, reindex, null)); assertEquals("Attempting to send sliced reindex-style request to a node that doesn't support it. " + "Version is [5.0.0-rc1] but must be [5.1.0]", e.getMessage()); @@ -105,7 +106,7 @@ public class RoundTripTests extends ESTestCase { // Try slices with a version that doesn't support slices. That should fail. update.setSlices(between(2, 1000)); - Exception e = expectThrows(UnsupportedOperationException.class, () -> roundTrip(Version.V_5_0_0_rc1, update, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_5_0_0_rc1, update, null)); assertEquals("Attempting to send sliced reindex-style request to a node that doesn't support it. " + "Version is [5.0.0-rc1] but must be [5.1.0]", e.getMessage()); @@ -126,7 +127,7 @@ public class RoundTripTests extends ESTestCase { // Try slices with a version that doesn't support slices. 
That should fail. delete.setSlices(between(2, 1000)); - Exception e = expectThrows(UnsupportedOperationException.class, () -> roundTrip(Version.V_5_0_0_rc1, delete, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_5_0_0_rc1, delete, null)); assertEquals("Attempting to send sliced reindex-style request to a node that doesn't support it. " + "Version is [5.0.0-rc1] but must be [5.1.0]", e.getMessage()); @@ -307,10 +308,12 @@ public class RoundTripTests extends ESTestCase { } private Script randomScript() { - return new Script(randomSimpleString(random()), // Name - randomFrom(ScriptType.values()), // Type - random().nextBoolean() ? null : randomSimpleString(random()), // Language - emptyMap()); // Params + ScriptType type = randomFrom(ScriptType.values()); + String lang = random().nextBoolean() ? Script.DEFAULT_SCRIPT_LANG : randomSimpleString(random()); + String idOrCode = randomSimpleString(random()); + Map params = Collections.emptyMap(); + + return new Script(type, lang, idOrCode, params); } private void assertResponseEquals(BulkIndexByScrollResponse expected, BulkIndexByScrollResponse actual) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 3cc8c3c5e6f..5c2278f59eb 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -439,16 +439,15 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), any(FutureCallback.class))).then(new Answer>() { - @Override - public Future 
answer(InvocationOnMock invocationOnMock) throws Throwable { - HeapBufferedAsyncResponseConsumer consumer = (HeapBufferedAsyncResponseConsumer) invocationOnMock.getArguments()[1]; - FutureCallback callback = (FutureCallback) invocationOnMock.getArguments()[2]; - - assertEquals(new ByteSizeValue(200, ByteSizeUnit.MB).bytesAsInt(), consumer.getBufferLimit()); - callback.failed(tooLong); - return null; - } - }); + @Override + public Future answer(InvocationOnMock invocationOnMock) throws Throwable { + HeapBufferedAsyncResponseConsumer consumer = (HeapBufferedAsyncResponseConsumer) invocationOnMock.getArguments()[1]; + FutureCallback callback = (FutureCallback) invocationOnMock.getArguments()[2]; + assertEquals(new ByteSizeValue(100, ByteSizeUnit.MB).bytesAsInt(), consumer.getBufferLimit()); + callback.failed(tooLong); + return null; + } + }); RemoteScrollableHitSource source = sourceWithMockedClient(true, httpClient); AtomicBoolean called = new AtomicBoolean(); diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0-snapshot-a66a445.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 29114cfcf70..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -39e5761c8209a6e4e940a3aec4ba57a6b631ca00 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0.jar.sha1 new file mode 100644 index 00000000000..22ff33fee84 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0.jar.sha1 @@ -0,0 +1 @@ +467d808656db028faa3cbc86d386dbf6164a835c \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0-snapshot-a66a445.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 
2ec23fb8b2d..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c4230c40a10cbb4ad54bcbe9e4265ecb598a4c25 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0.jar.sha1 new file mode 100644 index 00000000000..13226a0d4be --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0.jar.sha1 @@ -0,0 +1 @@ +bea02277bff7fa0f4d93e6abca94eaf0eec9c84f \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0-snapshot-a66a445.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 27a5a67a55a..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ccd0636f0df42146b5c77cac5ec57739c9ff2893 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0.jar.sha1 new file mode 100644 index 00000000000..5a57464512f --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0.jar.sha1 @@ -0,0 +1 @@ +657a1409f539b4a20b5487496a8e4471b33902fd \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0-snapshot-a66a445.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index a70cf1ae74f..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -17b3d2f5ffd58756b6d5bdc651eb2ea461885d0a \ No newline at end of file diff --git 
a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0.jar.sha1 new file mode 100644 index 00000000000..eab2257293c --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0.jar.sha1 @@ -0,0 +1 @@ +47792194b04e8cd61c3667da50a38adae257b19a \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0-snapshot-a66a445.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 466578a5e24..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3d540a7225837e25cc0ed02aefb0c7763e0f832 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0.jar.sha1 new file mode 100644 index 00000000000..bba7a9bc273 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0.jar.sha1 @@ -0,0 +1 @@ +bcf535520b92821cf04486031214d35d7405571c \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0-snapshot-a66a445.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0-snapshot-a66a445.jar.sha1 deleted file mode 100644 index 5ad5644d679..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0-snapshot-a66a445.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7e711a007cd1588f8118eb02803381d448ae087c \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0.jar.sha1 new file mode 100644 index 00000000000..e136d57854a --- /dev/null +++ 
b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0.jar.sha1 @@ -0,0 +1 @@ +82ed82174fae75f93741b8418046bc94e50434f8 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 88874968b21..c2d004bab4c 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -67,7 +67,7 @@ task createKey(type: LoggedExec) { project.delete(keystore.parentFile) keystore.parentFile.mkdirs() } - executable = 'keytool' + executable = new File(project.javaHome, 'bin/keytool') standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) args '-genkey', '-alias', 'test-node', diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java index 487a0ee5ce0..09519b14499 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java @@ -32,7 +32,9 @@ import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDeta import com.microsoft.windowsazure.management.configuration.ManagementConfiguration; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cloud.azure.classic.AzureServiceRemoteException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; public class AzureComputeServiceImpl extends AbstractLifecycleComponent @@ -43,11 +45,11 @@ public class AzureComputeServiceImpl extends 
AbstractLifecycleComponent public AzureComputeServiceImpl(Settings settings) { super(settings); - String subscriptionId = Management.SUBSCRIPTION_ID_SETTING.get(settings); + String subscriptionId = getRequiredSetting(settings, Management.SUBSCRIPTION_ID_SETTING); - serviceName = Management.SERVICE_NAME_SETTING.get(settings); - String keystorePath = Management.KEYSTORE_PATH_SETTING.get(settings); - String keystorePassword = Management.KEYSTORE_PASSWORD_SETTING.get(settings); + serviceName = getRequiredSetting(settings, Management.SERVICE_NAME_SETTING); + String keystorePath = getRequiredSetting(settings, Management.KEYSTORE_PATH_SETTING); + String keystorePassword = getRequiredSetting(settings, Management.KEYSTORE_PASSWORD_SETTING); KeyStoreType keystoreType = Management.KEYSTORE_TYPE_SETTING.get(settings); logger.trace("creating new Azure client for [{}], [{}]", subscriptionId, serviceName); @@ -77,6 +79,14 @@ public class AzureComputeServiceImpl extends AbstractLifecycleComponent } } + private static String getRequiredSetting(Settings settings, Setting setting) { + String value = setting.get(settings); + if (value == null || Strings.hasLength(value) == false) { + throw new IllegalArgumentException("Missing required setting " + setting.getKey() + " for azure"); + } + return value; + } + @Override public HostedServiceGetDetailedResponse getServiceDetails() { try { diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java index b26643adce0..9735b83d9e6 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java @@ -29,29 +29,33 @@ import org.apache.logging.log4j.Logger; 
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cloud.azure.classic.management.AzureComputeService; import org.elasticsearch.cloud.azure.classic.management.AzureComputeServiceImpl; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.azure.classic.AzureUnicastHostsProvider; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.discovery.zen.ZenPing; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; public class AzureDiscoveryPlugin extends Plugin implements DiscoveryPlugin { public static final String AZURE = "azure"; protected final Settings settings; - protected final Logger logger = Loggers.getLogger(AzureDiscoveryPlugin.class); + private static final Logger logger = Loggers.getLogger(AzureDiscoveryPlugin.class); + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); public AzureDiscoveryPlugin(Settings settings) { this.settings = settings; - DeprecationLogger deprecationLogger = new DeprecationLogger(logger); deprecationLogger.deprecated("azure classic discovery plugin is deprecated. 
Use azure arm discovery plugin instead"); logger.trace("starting azure classic discovery plugin..."); } @@ -68,10 +72,12 @@ public class AzureDiscoveryPlugin extends Plugin implements DiscoveryPlugin { () -> new AzureUnicastHostsProvider(settings, createComputeService(), transportService, networkService)); } - public void onModule(DiscoveryModule discoveryModule) { - if (isDiscoveryReady(settings, logger)) { - discoveryModule.addDiscoveryType(AZURE, ZenDiscovery.class); - } + @Override + public Map> getDiscoveryTypes(ThreadPool threadPool, TransportService transportService, + ClusterService clusterService, UnicastHostsProvider hostsProvider) { + // this is for backcompat with pre 5.1, where users would set discovery.type to use ec2 hosts provider + return Collections.singletonMap(AZURE, () -> + new ZenDiscovery(settings, threadPool, transportService, clusterService, hostsProvider)); } @Override @@ -88,36 +94,19 @@ public class AzureDiscoveryPlugin extends Plugin implements DiscoveryPlugin { AzureComputeService.Discovery.ENDPOINT_NAME_SETTING); } - /** - * Check if discovery is meant to start - * @return true if we can start discovery features - */ - private static boolean isDiscoveryReady(Settings settings, Logger logger) { - // User set discovery.type: azure - if (!AzureDiscoveryPlugin.AZURE.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings))) { - logger.trace("discovery.type not set to {}", AzureDiscoveryPlugin.AZURE); - return false; - } - - if (isDefined(settings, AzureComputeService.Management.SUBSCRIPTION_ID_SETTING) && - isDefined(settings, AzureComputeService.Management.SERVICE_NAME_SETTING) && - isDefined(settings, AzureComputeService.Management.KEYSTORE_PATH_SETTING) && - isDefined(settings, AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING)) { - logger.trace("All required properties for Azure discovery are set!"); - return true; - } else { - logger.debug("One or more Azure discovery settings are missing. 
" + - "Check elasticsearch.yml file. Should have [{}], [{}], [{}] and [{}].", - AzureComputeService.Management.SUBSCRIPTION_ID_SETTING.getKey(), - AzureComputeService.Management.SERVICE_NAME_SETTING.getKey(), - AzureComputeService.Management.KEYSTORE_PATH_SETTING.getKey(), - AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING.getKey()); - return false; + @Override + public Settings additionalSettings() { + // For 5.0, the hosts provider was "zen", but this was before the discovery.zen.hosts_provider + // setting existed. This check looks for the legacy setting, and sets hosts provider if set + String discoveryType = DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings); + if (discoveryType.equals(AZURE)) { + deprecationLogger.deprecated("Using " + DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey() + + " setting to set hosts provider is deprecated. " + + "Set \"" + DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey() + ": " + AZURE + "\" instead"); + if (DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.exists(settings) == false) { + return Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), AZURE).build(); + } } + return Settings.EMPTY; } - - private static boolean isDefined(Settings settings, Setting property) throws ElasticsearchException { - return (property.exists(settings) && Strings.hasText(property.get(settings))); - } - } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java index 58cabf1d9d6..6d367e21679 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java @@ -44,22 +44,28 @@ import org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.aws.AwsEc2Service; import 
org.elasticsearch.cloud.aws.AwsEc2ServiceImpl; import org.elasticsearch.cloud.aws.network.Ec2NameResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.ec2.AwsEc2UnicastHostsProvider; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.discovery.zen.ZenPing; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Closeable { private static Logger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class); + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); public static final String EC2 = "ec2"; @@ -93,8 +99,12 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close this.settings = settings; } - public void onModule(DiscoveryModule discoveryModule) { - discoveryModule.addDiscoveryType(EC2, ZenDiscovery.class); + @Override + public Map> getDiscoveryTypes(ThreadPool threadPool, TransportService transportService, + ClusterService clusterService, UnicastHostsProvider hostsProvider) { + // this is for backcompat with pre 5.1, where users would set discovery.type to use ec2 hosts provider + return Collections.singletonMap(EC2, () -> + new ZenDiscovery(settings, threadPool, transportService, clusterService, hostsProvider)); } @Override @@ -147,10 +157,25 @@ public class 
Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close AwsEc2Service.AUTO_ATTRIBUTE_SETTING); } - /** Adds a node attribute for the ec2 availability zone. */ @Override public Settings additionalSettings() { - return getAvailabilityZoneNodeAttributes(settings, AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone"); + Settings.Builder builder = Settings.builder(); + // For 5.0, discovery.type was used prior to the new discovery.zen.hosts_provider + // setting existed. This check looks for the legacy setting, and sets hosts provider if set + String discoveryType = DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings); + if (discoveryType.equals(EC2)) { + deprecationLogger.deprecated("Using " + DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey() + + " setting to set hosts provider is deprecated. " + + "Set \"" + DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey() + ": " + EC2 + "\" instead"); + if (DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.exists(settings) == false) { + builder.put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), EC2).build(); + } + } + + // Adds a node attribute for the ec2 availability zone + String azMetadataUrl = AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone"; + builder.put(getAvailabilityZoneNodeAttributes(settings, azMetadataUrl)); + return builder.build(); } // pkg private for testing diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java index d992dc1d642..d93725a03c3 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java @@ -62,13 +62,13 @@ public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin // For 5.0, the hosts provider was "zen", 
but this was before the discovery.zen.hosts_provider // setting existed. This check looks for the legacy zen, and sets the file hosts provider if not set String discoveryType = DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings); - // look at hosts provider setting to avoid fallback as default - String hostsProvider = settings.get(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey()); - if (hostsProvider == null && discoveryType.equals("zen")) { + if (discoveryType.equals("zen")) { deprecationLogger.deprecated("Using " + DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey() + " setting to set hosts provider is deprecated. " + "Set \"" + DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey() + ": file\" instead"); - return Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file").build(); + if (DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.exists(settings) == false) { + return Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file").build(); + } } return Settings.EMPTY; } diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index bbd2221d8e0..ede168e1f9d 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -35,7 +35,7 @@ task createKey(type: LoggedExec) { project.delete(keystore.parentFile) keystore.parentFile.mkdirs() } - executable = 'keytool' + executable = new File(project.javaHome, 'bin/keytool') standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) args '-genkey', '-alias', 'test-node', diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index be06c5b8866..f53abc4241c 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ 
b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -30,18 +30,23 @@ import org.elasticsearch.cloud.gce.GceInstancesServiceImpl; import org.elasticsearch.cloud.gce.GceMetadataService; import org.elasticsearch.cloud.gce.GceModule; import org.elasticsearch.cloud.gce.network.GceNameResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.gce.GceUnicastHostsProvider; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.discovery.zen.ZenPing; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.Closeable; @@ -60,7 +65,8 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close public static final String GCE = "gce"; private final Settings settings; - protected final Logger logger = Loggers.getLogger(GceDiscoveryPlugin.class); + private static final Logger logger = Loggers.getLogger(GceDiscoveryPlugin.class); + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); // stashed when created in order to properly close private final SetOnce gceInstancesService = new SetOnce<>(); @@ -91,9 +97,12 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close logger.trace("starting gce discovery 
plugin..."); } - public void onModule(DiscoveryModule discoveryModule) { - logger.debug("Register gce discovery type and gce unicast provider"); - discoveryModule.addDiscoveryType(GCE, ZenDiscovery.class); + @Override + public Map> getDiscoveryTypes(ThreadPool threadPool, TransportService transportService, + ClusterService clusterService, UnicastHostsProvider hostsProvider) { + // this is for backcompat with pre 5.1, where users would set discovery.type to use gce hosts provider + return Collections.singletonMap(GCE, () -> + new ZenDiscovery(settings, threadPool, transportService, clusterService, hostsProvider)); } @Override @@ -123,6 +132,22 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close GceInstancesService.MAX_WAIT_SETTING); } + @Override + public Settings additionalSettings() { + // For 5.0, the hosts provider was "zen", but this was before the discovery.zen.hosts_provider + // setting existed. This check looks for the legacy setting, and sets hosts provider if set + String discoveryType = DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings); + if (discoveryType.equals(GCE)) { + deprecationLogger.deprecated("Using " + DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey() + + " setting to set hosts provider is deprecated. 
" + + "Set \"" + DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey() + ": " + GCE + "\" instead"); + if (DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.exists(settings) == false) { + return Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), GCE).build(); + } + } + return Settings.EMPTY; + } + @Override public void close() throws IOException { IOUtils.close(gceInstancesService.get()); diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 2190036c7fc..6d5af71aa5b 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -46,7 +46,7 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable @Override public Map getProcessors(Processor.Parameters parameters) { if (databaseReaders != null) { - throw new IllegalStateException("called onModule twice for geoip plugin!!"); + throw new IllegalStateException("getProcessors called twice for geoip plugin!!"); } Path geoIpConfigDirectory = parameters.env.configFile().resolve("ingest-geoip"); try { diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java index ef3302fabcb..ac76858d110 100644 --- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java +++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java @@ -60,9 +60,6 @@ public class JvmExamplePlugin extends Plugin { return Settings.EMPTY; } - public void onModule(RepositoriesModule repositoriesModule) { - } - /** * Module declaring some example configuration and a _cat action that uses * it. 
diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java index 946a15755c8..6af04561e4b 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java @@ -25,7 +25,6 @@ import org.elasticsearch.SpecialPermission; import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.CompiledScript; diff --git a/qa/backwards-5.0/build.gradle b/qa/backwards-5.0/build.gradle index 7cb7d7a1a2a..bb5d69d32bb 100644 --- a/qa/backwards-5.0/build.gradle +++ b/qa/backwards-5.0/build.gradle @@ -18,7 +18,13 @@ integTest { cluster { numNodes = 2 numBwcNodes = 1 - bwcVersion = "6.0.0-alpha1-SNAPSHOT" // this is the same as the current version until we released the first RC + bwcVersion = "5.1.0-SNAPSHOT" setting 'logger.org.elasticsearch', 'DEBUG' } } + +repositories { + maven { + url "https://oss.sonatype.org/content/repositories/snapshots/" + } +} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 34621802f55..ca2575901bc 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -19,6 +19,11 @@ package org.elasticsearch.tribe; +import java.io.IOException; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.List; + import 
org.apache.lucene.util.IOUtils; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; @@ -34,16 +39,11 @@ import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.discovery.MockZenPing; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.transport.MockTcpTransportPlugin; import org.junit.AfterClass; import org.junit.BeforeClass; -import java.io.IOException; -import java.nio.file.Path; -import java.util.Arrays; -import java.util.List; - import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.equalTo; @@ -68,7 +68,7 @@ public class TribeUnitTests extends ESTestCase { .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 2) .build(); - final List> mockPlugins = Arrays.asList(MockTcpTransportPlugin.class, MockZenPing.TestPlugin.class); + final List> mockPlugins = Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class); tribe1 = new MockNode( Settings.builder() .put(baseSettings) @@ -110,7 +110,7 @@ public class TribeUnitTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(extraSettings).build(); - try (Node node = new MockNode(settings, Arrays.asList(MockTcpTransportPlugin.class, MockZenPing.TestPlugin.class)).start()) { + try (Node node = new MockNode(settings, Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class)).start()) { try (Client client = node.client()) { assertBusy(() -> { ClusterState state = client.admin().cluster().prepareState().clear().setNodes(true).get().getState(); diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index f90763a12dd..d88ef4b74e4 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -25,12 +25,8 @@ task oldClusterTest(type: 
RestIntegTestTask) { mustRunAfter(precommit) cluster { distribution = 'zip' - // TODO: Right now, this just forms a cluster with the current version of ES, - // because we don't support clusters with nodes on different alpha/beta releases of ES. - // When the GA is released, we should change the bwcVersion to 5.0.0 and uncomment - // numBwcNodes = 2 - //bwcVersion = '5.0.0-alpha5' // TODO: either randomize, or make this settable with sysprop - //numBwcNodes = 2 + bwcVersion = '5.1.0-SNAPSHOT' // TODO: either randomize, or make this settable with sysprop + numBwcNodes = 1 numNodes = 2 clusterName = 'rolling-upgrade' } @@ -69,3 +65,9 @@ task integTest { test.enabled = false // no unit tests for rolling upgrades, only the rest integration test check.dependsOn(integTest) + +repositories { + maven { + url "https://oss.sonatype.org/content/repositories/snapshots/" + } +} diff --git a/qa/smoke-test-tribe-node/build.gradle b/qa/smoke-test-tribe-node/build.gradle new file mode 100644 index 00000000000..6e108e87043 --- /dev/null +++ b/qa/smoke-test-tribe-node/build.gradle @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.elasticsearch.gradle.test.ClusterConfiguration +import org.elasticsearch.gradle.test.ClusterFormationTasks +import org.elasticsearch.gradle.test.NodeInfo + +apply plugin: 'elasticsearch.rest-test' + +List oneNodes + +task setupClusterOne(type: DefaultTask) { + mustRunAfter(precommit) + ClusterConfiguration configOne = new ClusterConfiguration(project) + configOne.clusterName = 'one' + configOne.setting('node.name', 'one') + oneNodes = ClusterFormationTasks.setup(project, setupClusterOne, configOne) +} + +List twoNodes + +task setupClusterTwo(type: DefaultTask) { + mustRunAfter(precommit) + ClusterConfiguration configTwo = new ClusterConfiguration(project) + configTwo.clusterName = 'two' + configTwo.setting('node.name', 'two') + twoNodes = ClusterFormationTasks.setup(project, setupClusterTwo, configTwo) +} + +integTest { + dependsOn(setupClusterOne, setupClusterTwo) + cluster { + // tribe nodes had a bug where if explicit ports was specified for the tribe node, the dynamic socket permissions that were applied + // would not account for the fact that the internal node client needed to bind to sockets too; thus, we use explicit port ranges to + // ensure that the code that fixes this bug is exercised + setting 'http.port', '40200-40249' + setting 'transport.tcp.port', '40300-40349' + setting 'node.name', 'quest' + setting 'tribe.one.cluster.name', 'one' + setting 'tribe.one.discovery.zen.ping.unicast.hosts', "'${-> oneNodes.get(0).transportUri()}'" + setting 'tribe.one.http.enabled', 'true' + setting 'tribe.one.http.port', '40250-40299' + setting 'tribe.one.transport.tcp.port', '40350-40399' + setting 'tribe.two.cluster.name', 'two' + setting 'tribe.two.discovery.zen.ping.unicast.hosts', "'${-> twoNodes.get(0).transportUri()}'" + setting 'tribe.two.http.enabled', 'true' + setting 'tribe.two.http.port', '40250-40299' + setting 'tribe.two.transport.tcp.port', '40350-40399' + } + // need to kill the standalone nodes here + finalizedBy 
'setupClusterOne#stop' + finalizedBy 'setupClusterTwo#stop' +} diff --git a/qa/smoke-test-tribe-node/src/test/java/org/elasticsearch/tribe/TribeClientYamlTestSuiteIT.java b/qa/smoke-test-tribe-node/src/test/java/org/elasticsearch/tribe/TribeClientYamlTestSuiteIT.java new file mode 100644 index 00000000000..6013913bdc4 --- /dev/null +++ b/qa/smoke-test-tribe-node/src/test/java/org/elasticsearch/tribe/TribeClientYamlTestSuiteIT.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.tribe; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; + +import java.io.IOException; + +public class TribeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + + // tribe nodes can not handle delete indices requests + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + // tribe nodes can not handle delete template requests + @Override + protected boolean preserveTemplatesUponCompletion() { + return true; + } + + public TribeClientYamlTestSuiteIT(@Name("yaml") final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, ClientYamlTestParseException { + return createParameters(); + } + +} diff --git a/qa/smoke-test-tribe-node/src/test/resources/rest-api-spec/test/tribe/10_basic.yaml b/qa/smoke-test-tribe-node/src/test/resources/rest-api-spec/test/tribe/10_basic.yaml new file mode 100644 index 00000000000..d70a355ac62 --- /dev/null +++ b/qa/smoke-test-tribe-node/src/test/resources/rest-api-spec/test/tribe/10_basic.yaml @@ -0,0 +1,16 @@ +--- +"Tribe node test": + - do: + cat.nodes: + h: name + s: name + v: true + + - match: + $body: | + /^ name\n + one\n + quest\n + quest/one\n + quest/two\n + two\n $/ diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index f263adbadc8..b30ea329f10 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -17,271 +17,13 @@ * under the License. 
*/ -import org.elasticsearch.gradle.FileContentsTask -import org.elasticsearch.gradle.vagrant.BatsOverVagrantTask -import org.elasticsearch.gradle.vagrant.VagrantCommandTask - -String testScripts = '*.bats' -String testCommand = "cd \$TESTROOT && sudo bats --tap \$BATS/$testScripts" -String smokeTestCommand = 'echo I work' - -// the images we allow testing with -List availableBoxes = [ - 'centos-6', - 'centos-7', - 'debian-8', - 'fedora-24', - 'oel-6', - 'oel-7', - 'opensuse-13', - 'sles-12', - 'ubuntu-1204', - 'ubuntu-1404', - 'ubuntu-1604' -] - -String vagrantBoxes = getProperties().get('vagrant.boxes', 'sample') -List boxes = [] -for (String box : vagrantBoxes.split(',')) { - if (box == 'sample') { - boxes.add('centos-7') - boxes.add('ubuntu-1404') - } else if (box == 'all') { - boxes = availableBoxes - break - } else { - if (availableBoxes.contains(box) == false) { - throw new IllegalArgumentException("Unknown vagrant box '${box}'") - } - boxes.add(box) - } -} - -long seed -String formattedSeed = null -String[] upgradeFromVersions -String upgradeFromVersion - -String maybeTestsSeed = System.getProperty("tests.seed", null); -if (maybeTestsSeed != null) { - List seeds = maybeTestsSeed.tokenize(':') - if (seeds.size() != 0) { - String masterSeed = seeds.get(0) - seed = new BigInteger(masterSeed, 16).longValue() - formattedSeed = maybeTestsSeed - } -} -if (formattedSeed == null) { - seed = new Random().nextLong() - formattedSeed = String.format("%016X", seed) -} - -String maybeUpdradeFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null) -if (maybeUpdradeFromVersions != null) { - upgradeFromVersions = maybeUpdradeFromVersions.split(",") -} else { - upgradeFromVersions = new File(project.projectDir, 'versions') -} - -upgradeFromVersion = upgradeFromVersions[new Random(seed).nextInt(upgradeFromVersions.length)] - -configurations { - test -} - -repositories { - mavenCentral() // Try maven central first, it'll have releases before 5.0.0 - /* 
Setup a repository that tries to download from - https://artifacts.elastic.co/downloads/elasticsearch/[module]-[revision].[ext] - which should work for 5.0.0+. This isn't a real ivy repository but gradle - is fine with that */ - ivy { - artifactPattern "https://artifacts.elastic.co/downloads/elasticsearch/[module]-[revision].[ext]" - } -} +apply plugin: 'elasticsearch.vagrant' dependencies { - test project(path: ':distribution:tar', configuration: 'archives') - test project(path: ':distribution:rpm', configuration: 'archives') - test project(path: ':distribution:deb', configuration: 'archives') - // Collect all the plugins for (Project subproj : project.rootProject.subprojects) { if (subproj.path.startsWith(':plugins:')) { - test project(path: "${subproj.path}", configuration: 'zip') - } - } - - // The version of elasticsearch that we upgrade *from* - test "org.elasticsearch.distribution.deb:elasticsearch:$upgradeFromVersion@deb" - test "org.elasticsearch.distribution.rpm:elasticsearch:$upgradeFromVersion@rpm" -} - -task clean(type: Delete) { - group 'Build' - delete buildDir -} - -task stop { - group 'Verification' - description 'Stop any tasks from tests that still may be running' -} - -Set getVersions() { - Node xml - new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s -> - xml = new XmlParser().parse(s) - } - Set versions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /[5]\.\d\.\d/ }) - if (versions.isEmpty() == false) { - return versions; - } - - // If no version is found, we run the tests with the current version - return Collections.singleton(project.version); -} - -task updatePackagingTestUpgradeFromVersions { - group 'Verification' - description 'Update file containing options for the\n "starting" version in the "upgrade from" packaging tests.' 
- doLast { - Set versions = getVersions() - new File(project.projectDir, 'versions').text = versions.join('\n') + '\n' - } -} - -task verifyPackagingTestUpgradeFromVersions { - doLast { - String maybeUpdateFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null) - if (maybeUpdateFromVersions == null) { - Set versions = getVersions() - Set actualVersions = new TreeSet<>(Arrays.asList(upgradeFromVersions)) - if (!versions.equals(actualVersions)) { - throw new GradleException("out-of-date versions " + actualVersions + - ", expected " + versions + "; run gradle updatePackagingTestUpgradeFromVersions") - } + bats project(path: "${subproj.path}", configuration: 'zip') } } } - -File testRoot = new File("$buildDir/testroot") -task createTestRoot { - dependsOn verifyPackagingTestUpgradeFromVersions - outputs.dir testRoot - doLast { - testRoot.mkdirs() - } -} - -task createVersionFile(type: FileContentsTask) { - dependsOn createTestRoot - file "${testRoot}/version" - contents = version -} - -task createUpgradeFromFile(type: FileContentsTask) { - dependsOn createTestRoot - file "${testRoot}/upgrade_from_version" - contents = upgradeFromVersion -} - -task prepareTestRoot(type: Copy) { - description 'Dump bats test dependencies into the $TESTROOT' - into testRoot - from configurations.test - - dependsOn createVersionFile, createUpgradeFromFile - doFirst { - gradle.addBuildListener new BuildAdapter() { - @Override - void buildFinished(BuildResult result) { - if (result.failure) { - println "Reproduce with: gradle packagingTest -Pvagrant.boxes=${vagrantBoxes} -Dtests.seed=${formattedSeed} -Dtests.packaging.upgrade.from.versions=${upgradeFromVersions.join(",")}" - } - } - } - } -} - -task checkVagrantVersion(type: Exec) { - commandLine 'vagrant', '--version' - standardOutput = new ByteArrayOutputStream() - doLast { - String version = standardOutput.toString().trim() - if ((version ==~ /Vagrant 1\.[789]\..+/) == false) { - throw new InvalidUserDataException( 
- "Illegal version of vagrant [${version}]. Need [Vagrant 1.7+]") - } - } -} - -task vagrantSmokeTest { - group 'Verification' - description 'Smoke test the specified vagrant boxes' -} - -task packagingTest { - group 'Verification' - description "Tests yum/apt packages using vagrant and bats.\n" + - " Specify the vagrant boxes to test using the gradle property 'vagrant.boxes'.\n" + - " 'sample' can be used to test a single yum and apt box. 'all' can be used to\n" + - " test all available boxes. The available boxes are: \n" + - " ${availableBoxes}" -} - -// Each box gets it own set of tasks -for (String box : availableBoxes) { - String boxTask = box.capitalize().replace('-', '') - - // always add a halt task for all boxes, so clean makes sure they are all shutdown - Task halt = tasks.create("vagrant${boxTask}#halt", VagrantCommandTask) { - boxName box - args 'halt', box - } - stop.dependsOn(halt) - if (boxes.contains(box) == false) { - // we only need a halt task if this box was not specified - continue; - } - - Task update = tasks.create("vagrant${boxTask}#update", VagrantCommandTask) { - boxName box - args 'box', 'update', box - dependsOn checkVagrantVersion - } - - Task up = tasks.create("vagrant${boxTask}#up", VagrantCommandTask) { - boxName box - /* It's important that we try to reprovision the box even if it already - exists. That way updates to the vagrant configuration take automatically. - That isn't to say that the updates will always be compatible. Its ok to - just destroy the boxes if they get busted but that is a manual step - because its slow-ish. */ - /* We lock the provider to virtualbox because the Vagrantfile specifies - lots of boxes that only work properly in virtualbox. Virtualbox is - vagrant's default but its possible to change that default and folks do. - But the boxes that we use are unlikely to work properly with other - virtualization providers. Thus the lock. 
*/ - args 'up', box, '--provision', '--provider', 'virtualbox' - /* It'd be possible to check if the box is already up here and output - SKIPPED but that would require running vagrant status which is slow! */ - dependsOn update - } - - Task smoke = tasks.create("vagrant${boxTask}#smoketest", Exec) { - dependsOn up - finalizedBy halt - commandLine 'vagrant', 'ssh', box, '--command', - "set -o pipefail && ${smokeTestCommand} | sed -ue 's/^/ ${box}: /'" - } - vagrantSmokeTest.dependsOn(smoke) - - Task packaging = tasks.create("packagingTest${boxTask}", BatsOverVagrantTask) { - dependsOn up - finalizedBy halt - boxName box - command testCommand - dependsOn prepareTestRoot - } - packagingTest.dependsOn(packaging) -} diff --git a/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats similarity index 95% rename from qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats rename to qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats index 83c12f960e5..726cd5468ac 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats @@ -29,9 +29,9 @@ # under the License. 
# Load test utilities -load packaging_test_utils -load tar -load plugins +load $BATS_UTILS/utils.bash +load $BATS_UTILS/tar.bash +load $BATS_UTILS/plugins.bash setup() { skip_not_tar_gz @@ -110,12 +110,12 @@ setup() { local temp=`mktemp -d` touch "$temp/jvm.options" chown -R elasticsearch:elasticsearch "$temp" - echo "-Xms264m" >> "$temp/jvm.options" - echo "-Xmx264m" >> "$temp/jvm.options" + echo "-Xms512m" >> "$temp/jvm.options" + echo "-Xmx512m" >> "$temp/jvm.options" export ES_JVM_OPTIONS="$temp/jvm.options" export ES_JAVA_OPTS="-XX:-UseCompressedOops" start_elasticsearch_service - curl -s -XGET localhost:9200/_nodes | fgrep '"heap_init_in_bytes":276824064' + curl -s -XGET localhost:9200/_nodes | fgrep '"heap_init_in_bytes":536870912' curl -s -XGET localhost:9200/_nodes | fgrep '"using_compressed_ordinary_object_pointers":"false"' stop_elasticsearch_service export ES_JVM_OPTIONS=$es_jvm_options diff --git a/qa/vagrant/src/test/resources/packaging/scripts/25_tar_plugins.bats b/qa/vagrant/src/test/resources/packaging/tests/25_tar_plugins.bats similarity index 100% rename from qa/vagrant/src/test/resources/packaging/scripts/25_tar_plugins.bats rename to qa/vagrant/src/test/resources/packaging/tests/25_tar_plugins.bats diff --git a/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats similarity index 98% rename from qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats rename to qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats index d435a76b9c7..b7e925f2899 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats @@ -30,9 +30,9 @@ # under the License. 
# Load test utilities -load packaging_test_utils -load os_package -load plugins +load $BATS_UTILS/utils.bash +load $BATS_UTILS/packages.bash +load $BATS_UTILS/plugins.bash # Cleans everything for the 1st execution setup() { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats similarity index 98% rename from qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats rename to qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats index b6ec78509d1..9a85afc9a63 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats @@ -29,9 +29,9 @@ # under the License. # Load test utilities -load packaging_test_utils -load os_package -load plugins +load $BATS_UTILS/utils.bash +load $BATS_UTILS/packages.bash +load $BATS_UTILS/plugins.bash # Cleans everything for the 1st execution setup() { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/50_modules_and_plugins.bats b/qa/vagrant/src/test/resources/packaging/tests/50_modules_and_plugins.bats similarity index 100% rename from qa/vagrant/src/test/resources/packaging/scripts/50_modules_and_plugins.bats rename to qa/vagrant/src/test/resources/packaging/tests/50_modules_and_plugins.bats diff --git a/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats similarity index 98% rename from qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats rename to qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats index de1416059dd..7eaa0843f9f 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats @@ -29,9 +29,9 @@ # under the License. 
# Load test utilities -load packaging_test_utils -load os_package -load plugins +load $BATS_UTILS/utils.bash +load $BATS_UTILS/packages.bash +load $BATS_UTILS/plugins.bash # Cleans everything for the 1st execution setup() { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats b/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats similarity index 84% rename from qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats rename to qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats index fa96882f914..26c8c8082d1 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats @@ -29,9 +29,9 @@ # under the License. # Load test utilities -load packaging_test_utils -load os_package -load plugins +load $BATS_UTILS/utils.bash +load $BATS_UTILS/packages.bash +load $BATS_UTILS/plugins.bash # Cleans everything for the 1st execution setup() { @@ -134,6 +134,26 @@ setup() { [ "$status" -eq 3 ] || [ "$status" -eq 4 ] } +@test "[INIT.D] start Elasticsearch with custom JVM options" { + assert_file_exist $ESENVFILE + local es_java_opts=$ES_JAVA_OPTS + local es_jvm_options=$ES_JVM_OPTIONS + local temp=`mktemp -d` + touch "$temp/jvm.options" + chown -R elasticsearch:elasticsearch "$temp" + echo "-Xms512m" >> "$temp/jvm.options" + echo "-Xmx512m" >> "$temp/jvm.options" + cp $ESENVFILE "$temp/elasticsearch" + echo "ES_JVM_OPTIONS=\"$temp/jvm.options\"" >> $ESENVFILE + echo "ES_JAVA_OPTS=\"-XX:-UseCompressedOops\"" >> $ESENVFILE + service elasticsearch start + wait_for_elasticsearch_status + curl -s -XGET localhost:9200/_nodes | fgrep '"heap_init_in_bytes":536870912' + curl -s -XGET localhost:9200/_nodes | fgrep '"using_compressed_ordinary_object_pointers":"false"' + service elasticsearch stop + cp "$temp/elasticsearch" $ESENVFILE +} + # Simulates the behavior of a system restart: # the PID directory is deleted by the operating system # but 
it should not block ES from starting diff --git a/qa/vagrant/src/test/resources/packaging/scripts/80_upgrade.bats b/qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats similarity index 98% rename from qa/vagrant/src/test/resources/packaging/scripts/80_upgrade.bats rename to qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats index feca52c7bbc..a14823a9cc4 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/80_upgrade.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats @@ -31,8 +31,8 @@ # under the License. # Load test utilities -load packaging_test_utils -load os_package +load $BATS_UTILS/utils.bash +load $BATS_UTILS/packages.bash # Cleans everything for the 1st execution setup() { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/90_reinstall.bats b/qa/vagrant/src/test/resources/packaging/tests/90_reinstall.bats similarity index 97% rename from qa/vagrant/src/test/resources/packaging/scripts/90_reinstall.bats rename to qa/vagrant/src/test/resources/packaging/tests/90_reinstall.bats index 3c2f7be7330..4dd682efbdd 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/90_reinstall.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/90_reinstall.bats @@ -31,8 +31,8 @@ # under the License. 
# Load test utilities -load packaging_test_utils -load os_package +load $BATS_UTILS/utils.bash +load $BATS_UTILS/packages.bash # Cleans everything for the 1st execution setup() { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/example/scripts/is_guide.groovy b/qa/vagrant/src/test/resources/packaging/tests/example/scripts/is_guide.groovy similarity index 100% rename from qa/vagrant/src/test/resources/packaging/scripts/example/scripts/is_guide.groovy rename to qa/vagrant/src/test/resources/packaging/tests/example/scripts/is_guide.groovy diff --git a/qa/vagrant/src/test/resources/packaging/scripts/example/scripts/is_guide.mustache b/qa/vagrant/src/test/resources/packaging/tests/example/scripts/is_guide.mustache similarity index 100% rename from qa/vagrant/src/test/resources/packaging/scripts/example/scripts/is_guide.mustache rename to qa/vagrant/src/test/resources/packaging/tests/example/scripts/is_guide.mustache diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash similarity index 99% rename from qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash rename to qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash index b979f40e309..2ff853bc70b 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash @@ -39,9 +39,9 @@ # system uses. 
# Load test utilities -load packaging_test_utils -load modules -load plugins +load $BATS_UTILS/utils.bash +load $BATS_UTILS/modules.bash +load $BATS_UTILS/plugins.bash setup() { # The rules on when we should clean an reinstall are complex - all the @@ -60,7 +60,7 @@ setup() { } if [[ "$BATS_TEST_FILENAME" =~ 25_tar_plugins.bats$ ]]; then - load tar + load $BATS_UTILS/tar.bash GROUP='TAR PLUGINS' install() { install_archive @@ -70,7 +70,7 @@ if [[ "$BATS_TEST_FILENAME" =~ 25_tar_plugins.bats$ ]]; then export_elasticsearch_paths export ESPLUGIN_COMMAND_USER=elasticsearch else - load os_package + load $BATS_UTILS/packages.bash if is_rpm; then GROUP='RPM PLUGINS' elif is_dpkg; then diff --git a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash b/qa/vagrant/src/test/resources/packaging/utils/modules.bash similarity index 100% rename from qa/vagrant/src/test/resources/packaging/scripts/modules.bash rename to qa/vagrant/src/test/resources/packaging/utils/modules.bash diff --git a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash b/qa/vagrant/src/test/resources/packaging/utils/packages.bash similarity index 92% rename from qa/vagrant/src/test/resources/packaging/scripts/os_package.bash rename to qa/vagrant/src/test/resources/packaging/utils/packages.bash index 4948862d6ab..700c1c66185 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/packages.bash @@ -36,6 +36,12 @@ export_elasticsearch_paths() { export ESDATA="/var/lib/elasticsearch" export ESLOG="/var/log/elasticsearch" export ESPIDDIR="/var/run/elasticsearch" + if is_dpkg; then + export ESENVFILE="/etc/default/elasticsearch" + fi + if is_rpm; then + export ESENVFILE="/etc/sysconfig/elasticsearch" + fi } # Install the rpm or deb package. 
@@ -117,6 +123,11 @@ verify_package_installation() { assert_file "/usr/lib/systemd/system/elasticsearch.service" f root root 644 assert_file "/usr/lib/tmpfiles.d/elasticsearch.conf" f root root 644 assert_file "/usr/lib/sysctl.d/elasticsearch.conf" f root root 644 + if is_rpm; then + [[ $(/usr/sbin/sysctl vm.max_map_count) =~ "vm.max_map_count = 262144" ]] + else + [[ $(/sbin/sysctl vm.max_map_count) =~ "vm.max_map_count = 262144" ]] + fi fi if is_sysvinit; then diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/utils/plugins.bash similarity index 100% rename from qa/vagrant/src/test/resources/packaging/scripts/plugins.bash rename to qa/vagrant/src/test/resources/packaging/utils/plugins.bash diff --git a/qa/vagrant/src/test/resources/packaging/scripts/tar.bash b/qa/vagrant/src/test/resources/packaging/utils/tar.bash similarity index 100% rename from qa/vagrant/src/test/resources/packaging/scripts/tar.bash rename to qa/vagrant/src/test/resources/packaging/utils/tar.bash diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/utils/utils.bash similarity index 100% rename from qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash rename to qa/vagrant/src/test/resources/packaging/utils/utils.bash diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yaml index eb651f6b157..7c7445fc67d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yaml @@ -1,8 +1,8 @@ --- "Help": - skip: - version: " - 5.0.99" - reason: templates were introduced in 5.1.0 + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 - do: cat.templates: help: true @@ -10,7 +10,7 @@ - match: 
$body: | /^ name .+ \n - template .+ \n + index_patterns .+ \n order .+ \n version .+ \n $/ @@ -30,16 +30,18 @@ --- "Normal templates": + - skip: - version: " - 5.0.99" - reason: templates were introduced in 5.1.0 + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.put_template: name: test body: order: 0 version: 1 - template: test-* + index_patterns: test-* settings: number_of_shards: 1 number_of_replicas: 0 @@ -50,7 +52,7 @@ body: order: 1 version: 2 - template: test-2* + index_patterns: test-2* settings: number_of_shards: 1 number_of_replicas: 0 @@ -61,7 +63,7 @@ - match: $body: / (^|\n)test \s+ - test-\* \s+ + \[test-\*\] \s+ 0 \s+ 1 (\n|$) @@ -70,7 +72,7 @@ - match: $body: / (^|\n)test_2 \s+ - test-2\* \s+ + \[test-2\*\] \s+ 1 \s+ 2 (\n|$) @@ -78,16 +80,18 @@ --- "Filtered templates": + - skip: - version: " - 5.0.99" - reason: templates were introduced in 5.1.0 + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.put_template: name: test body: order: 0 version: 1 - template: t* + index_patterns: t* settings: number_of_shards: 1 number_of_replicas: 0 @@ -98,7 +102,7 @@ body: order: 2 version: 1 - template: tea* + index_patterns: tea* settings: number_of_shards: 1 number_of_replicas: 0 @@ -111,7 +115,7 @@ $body: | /^ test \s+ - t\* \s+ + \[t\*\] \s+ 0 \s+ 1 \n @@ -120,15 +124,15 @@ --- "Column headers": - skip: - version: " - 5.0.99" - reason: templates were introduced in 5.1.0 + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 - do: indices.put_template: name: test body: order: 0 version: 1 - template: t* + index_patterns: t* settings: number_of_shards: 1 number_of_replicas: 0 @@ -141,14 +145,14 @@ - match: $body: | /^ - name \s+ - template \s+ - order \s+ + name \s+ + index_patterns \s+ + order \s+ version \n - test \s+ - t\* \s+ - 0 \s+ + test \s+ + \[t\*\] \s+ + 0 \s+ 1 \n $/ @@ -156,22 +160,22 @@ --- "Select columns": - skip: - version: " - 
5.0.99" - reason: templates were introduced in 5.1.0 + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 - do: indices.put_template: name: test body: order: 0 version: 1 - template: t* + index_patterns: t* settings: number_of_shards: 1 number_of_replicas: 0 - do: cat.templates: - h: [name, template] + h: [name, index_patterns] v: true name: test* @@ -179,24 +183,24 @@ $body: | /^ name \s+ - template + index_patterns \n test \s+ - t\* + \[t\*\] \n $/ --- "Sort templates": - skip: - version: " - 5.0.99" - reason: templates were introduced in 5.1.0 + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 - do: indices.put_template: name: test body: order: 0 - template: t* + index_patterns: t* settings: number_of_shards: 1 number_of_replicas: 0 @@ -207,31 +211,65 @@ body: order: 0 version: 1 - template: te* + index_patterns: te* settings: number_of_shards: 1 number_of_replicas: 0 - do: cat.templates: - h: [name, template, version] + h: [name, index_patterns, version] s: [version] - match: $body: | /^ - test \s+ t\* \s+\n - test_1 \s+ te\* \s+ 1\n + test \s+ \[t\*\] \s+ \n + test_1 \s+ \[te\*\] \s+ 1 \n $/ - do: cat.templates: - h: [name, template, version] + h: [name, index_patterns, version] s: ["version:desc"] - match: $body: | /^ - test_1 \s+ te\* \s+ 1\n - test \s+ t\* \s+\n + test_1 \s+ \[te\*\] \s+ 1\n + test \s+ \[t\*\] \s+ \n + + $/ + +--- +"Multiple template": + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: + indices.put_template: + name: test_1 + body: + order: 0 + version: 1 + index_patterns: [t*, te*] + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + cat.templates: + h: [name, index_patterns] + v: true + + + - match: + $body: | + /^ + name \s+ + index_patterns + \n + test_1 \s+ + \[t\*,\ te\*\] + \n $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yaml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yaml index 727d0f4dbe1..0dd1a452548 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yaml @@ -6,6 +6,11 @@ setup: ignore: [404] --- "Test indices.exists_template": + + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.exists_template: name: test @@ -16,7 +21,7 @@ setup: indices.put_template: name: test body: - template: 'test-*' + index_patterns: ['test-*'] settings: number_of_shards: 1 number_of_replicas: 0 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yaml index 35bd40678e4..4e21b818d62 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yaml @@ -3,7 +3,7 @@ setup: indices.put_template: name: test body: - template: test-* + index_patterns: test-* settings: number_of_shards: 1 number_of_replicas: 0 @@ -11,33 +11,45 @@ setup: --- "Get template": + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.get_template: name: test - - match: {test.template: "test-*"} + - match: {test.index_patterns: ["test-*"]} - match: {test.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} --- "Get all templates": + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.put_template: name: test2 body: - template: test2-* + index_patterns: test2-* settings: number_of_shards: 1 - do: indices.get_template: {} - - match: {test.template: "test-*"} - - match: {test2.template: "test2-*"} + - match: {test.index_patterns: 
["test-*"]} + - match: {test2.index_patterns: ["test2-*"]} --- "Get template with local flag": + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.get_template: name: test @@ -48,6 +60,10 @@ setup: --- "Get template with flat settings and master timeout": + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.get_template: name: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yaml index 98fd6b3a984..5527c023b13 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yaml @@ -27,3 +27,35 @@ name: test_alias - match: {test_index.aliases.test_alias: {}} + +--- +"Can't create alias with invalid characters": + - skip: + version: " - 5.0.99" + reason: alias name validation was introduced in 5.1.0 + + - do: + indices.create: + index: test_index + + - do: + catch: request + indices.put_alias: + index: test_index + name: test_* + +--- +"Can't create alias with the same name as an index": + + - do: + indices.create: + index: test_index + - do: + indices.create: + index: foo + + - do: + catch: request + indices.put_alias: + index: test_index + name: foo diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yaml index 3d70e930a04..8c5ba1c8d5e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yaml @@ -1,10 +1,15 @@ --- "Put template": + + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: 
indices.put_template: name: test body: - template: test-* + index_patterns: test-* settings: number_of_shards: 1 number_of_replicas: 0 @@ -14,16 +19,45 @@ name: test flat_settings: true - - match: {test.template: "test-*"} + - match: {test.index_patterns: ["test-*"]} - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} --- -"Put template with aliases": +"Put multiple template": + + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.put_template: name: test body: - template: test-* + index_patterns: [test-*, test2-*] + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + indices.get_template: + name: test + flat_settings: true + + - match: {test.index_patterns: ["test-*", "test2-*"]} + - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} + +--- +"Put template with aliases": + + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + + - do: + indices.put_template: + name: test + body: + index_patterns: test-* aliases: test_alias: {} test_blias: { routing: b } @@ -33,7 +67,7 @@ indices.get_template: name: test - - match: { test.template: "test-*" } + - match: { test.index_patterns: ["test-*"] } - length: { test.aliases: 3 } - is_true: test.aliases.test_alias - match: { test.aliases.test_blias.index_routing: "b" } @@ -42,12 +76,17 @@ --- "Put template create": + + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.put_template: name: test create: true body: - template: test-* + index_patterns: test-* settings: number_of_shards: 1 number_of_replicas: 0 @@ -57,7 +96,7 @@ name: test flat_settings: true - - match: {test.template: "test-*"} + - match: {test.index_patterns: ["test-*"]} - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} - do: @@ -66,20 +105,25 @@ name: test create: true body: - template: test-* + 
index_patterns: test-* settings: number_of_shards: 1 number_of_replicas: 0 --- "Test Put Versioned Template": + + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + - do: indices.put_template: name: "my_template" body: > { "version": 10, - "template": "*", + "index_patterns": "*", "settings": { "number_of_shards": 1 } } - match: { acknowledged: true } @@ -96,7 +140,7 @@ body: > { "version": 9, - "template": "*", + "index_patterns": "*", "settings": { "number_of_shards": 1 } } - match: { acknowledged: true } @@ -113,7 +157,7 @@ body: > { "version": 6789, - "template": "*", + "index_patterns": "*", "settings": { "number_of_shards": 1 } } - match: { acknowledged: true } @@ -129,7 +173,7 @@ name: "my_template" body: > { - "template": "*", + "index_patterns": "*", "settings": { "number_of_shards": 1 } } - match: { acknowledged: true } @@ -146,7 +190,7 @@ body: > { "version": 5385, - "template": "*", + "index_patterns": "*", "settings": { "number_of_shards": 1 } } - match: { acknowledged: true } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml index 62a75b0ff04..b391032bee3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml @@ -58,6 +58,7 @@ index: "source" target: "target" wait_for_active_shards: 1 + master_timeout: 10s body: settings: index.number_of_replicas: 0 diff --git a/settings.gradle b/settings.gradle index eda0ec8658a..58893345898 100644 --- a/settings.gradle +++ b/settings.gradle @@ -59,13 +59,14 @@ List projects = [ 'qa:evil-tests', 'qa:rolling-upgrade', 'qa:smoke-test-client', + 'qa:smoke-test-http', 'qa:smoke-test-ingest-with-all-dependencies', 'qa:smoke-test-ingest-disabled', 'qa:smoke-test-multinode', 'qa:smoke-test-rank-eval-with-mustache', 
'qa:smoke-test-plugins', 'qa:smoke-test-reindex-with-painless', - 'qa:smoke-test-http', + 'qa:smoke-test-tribe-node', 'qa:vagrant', ] diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index 576b290ed40..fc455783575 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -18,19 +18,20 @@ */ package org.elasticsearch.cluster; +import java.util.Arrays; +import java.util.Collections; +import java.util.concurrent.CountDownLatch; + import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; -import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.monitor.fs.FsInfo; @@ -38,10 +39,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import java.util.Arrays; -import java.util.Collections; -import java.util.concurrent.CountDownLatch; - import static 
java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -51,11 +48,8 @@ import static java.util.Collections.emptySet; */ public class MockInternalClusterInfoService extends InternalClusterInfoService { - public static class TestPlugin extends Plugin { - public void onModule(ClusterModule module) { - module.clusterInfoServiceImpl = MockInternalClusterInfoService.class; - } - } + /** This is a marker plugin used to trigger MockNode to use this mock info service. */ + public static class TestPlugin extends Plugin {} private final ClusterName clusterName; private volatile NodeStats[] stats = new NodeStats[3]; @@ -75,12 +69,8 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { null, null, null); } - @Inject - public MockInternalClusterInfoService(Settings settings, ClusterSettings clusterSettings, - TransportNodesStatsAction transportNodesStatsAction, - TransportIndicesStatsAction transportIndicesStatsAction, - ClusterService clusterService, ThreadPool threadPool) { - super(settings, clusterSettings, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); + public MockInternalClusterInfoService(Settings settings, ClusterService clusterService, ThreadPool threadPool, NodeClient client) { + super(settings, clusterService, threadPool, client); this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); stats[0] = makeStats("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100)); stats[1] = makeStats("node_t2", new DiskUsage("node_t2", "n2", "/dev/null", 100, 100)); diff --git a/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java index 7ddd2526fcd..c6065f7e583 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java @@ -33,11 +33,13 @@ 
import java.util.Collection; import java.util.Collections; import java.util.List; -// this must exist in the same package as IndexModule to allow access to setting the impl +/** + * A plugin to use {@link MockEngineFactory}. + * + * Subclasses may override the reader wrapper used. + */ public class MockEngineFactoryPlugin extends Plugin { - private Class readerWrapper = AssertingDirectoryReader.class; - @Override public List> getSettings() { return Arrays.asList(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE, MockEngineSupport.WRAP_READER_RATIO); @@ -45,22 +47,10 @@ public class MockEngineFactoryPlugin extends Plugin { @Override public void onIndexModule(IndexModule module) { - module.engineFactory.set(new MockEngineFactory(readerWrapper)); + module.engineFactory.set(new MockEngineFactory(getReaderWrapperClass())); } - @Override - public Collection createGuiceModules() { - return Collections.singleton(new MockEngineReaderModule()); - } - - public class MockEngineReaderModule extends AbstractModule { - - public void setReaderClass(Class readerWrapper) { - MockEngineFactoryPlugin.this.readerWrapper = readerWrapper; - } - - @Override - protected void configure() { - } + protected Class getReaderWrapperClass() { + return AssertingDirectoryReader.class; } } diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 38e8a8436b1..c75d9bbcb6d 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -19,6 +19,9 @@ package org.elasticsearch.node; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.ClusterInfoService; +import org.elasticsearch.cluster.MockInternalClusterInfoService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -99,16 +102,6 
@@ public class MockNode extends Node { } } - @Override - protected ZenPing newZenPing(Settings settings, ThreadPool threadPool, TransportService transportService, - UnicastHostsProvider hostsProvider) { - if (getPluginsService().filterPlugins(MockZenPing.TestPlugin.class).isEmpty()) { - return super.newZenPing(settings, threadPool, transportService, hostsProvider); - } else { - return new MockZenPing(settings); - } - } - @Override protected Node newTribeClientNode(Settings settings, Collection> classpathPlugins) { return new MockNode(settings, classpathPlugins); @@ -120,5 +113,15 @@ public class MockNode extends Node { clusterSettings.addSettingsUpdateConsumer(RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING, recoverySettings::setChunkSize); } } + + @Override + protected ClusterInfoService newClusterInfoService(Settings settings, ClusterService clusterService, + ThreadPool threadPool, NodeClient client) { + if (getPluginsService().filterPlugins(MockInternalClusterInfoService.TestPlugin.class).isEmpty()) { + return super.newClusterInfoService(settings, clusterService, threadPool, client); + } else { + return new MockInternalClusterInfoService(settings, clusterService, threadPool, client); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 0ece6fad393..1a7aac925f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.junit.listeners.LoggingListener; @@ 
-204,11 +205,6 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { return finalSettings.build(); } - @Override - protected boolean addMockZenPings() { - return false; - } - protected int minExternalNodes() { return 1; } protected int maxExternalNodes() { @@ -246,6 +242,7 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { protected Settings commonNodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(requiredSettings()); builder.put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? "netty3" : "netty4"); // run same transport / disco as external + builder.put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false); return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 470d4f48df5..82e7ce072e0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -119,7 +119,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.client.RandomizingClient; -import org.elasticsearch.test.discovery.MockZenPing; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; @@ -408,7 +408,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } PutIndexTemplateRequestBuilder putTemplate = client().admin().indices() .preparePutTemplate("random_index_template") - .setTemplate("*") + .setPatterns(Collections.singletonList("*")) .setOrder(0) .setSettings(randomSettingsBuilder); if (mappings != null) { @@ -577,7 +577,7 @@ public abstract class ESIntegTestCase extends ESTestCase { 
return Collections.emptySet(); } - protected void beforeIndexDeletion() throws IOException { + protected void beforeIndexDeletion() throws Exception { cluster().beforeIndexDeletion(); } @@ -1805,10 +1805,6 @@ public abstract class ESIntegTestCase extends ESTestCase { return true; } - protected boolean addMockZenPings() { - return true; - } - /** * Returns a function that allows to wrap / filter all clients that are exposed by the test cluster. This is useful * for debugging or request / response pre and post processing. It also allows to intercept all calls done by the test @@ -1846,9 +1842,7 @@ public abstract class ESIntegTestCase extends ESTestCase { mocks.add(MockTcpTransportPlugin.class); } - if (addMockZenPings()) { - mocks.add(MockZenPing.TestPlugin.class); - } + mocks.add(TestZenDiscovery.TestPlugin.class); mocks.add(TestSeedPlugin.class); return Collections.unmodifiableList(mocks); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 9648eb5798e..0b2adfa52e1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -18,6 +18,13 @@ */ package org.elasticsearch.test; +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; @@ -45,7 +52,7 @@ import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.discovery.MockZenPing; +import 
org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.MockTcpTransportPlugin; import org.junit.After; @@ -53,13 +60,6 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import java.io.IOException; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -87,7 +87,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { assertFalse(clusterHealthResponse.isTimedOut()); client().admin().indices() .preparePutTemplate("random_index_template") - .setTemplate("*") + .setPatterns(Collections.singletonList("*")) .setOrder(0) .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get(); @@ -191,9 +191,9 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { plugins = new ArrayList<>(plugins); plugins.add(MockTcpTransportPlugin.class); } - if (plugins.contains(MockZenPing.TestPlugin.class) == false) { + if (plugins.contains(TestZenDiscovery.TestPlugin.class) == false) { plugins = new ArrayList<>(plugins); - plugins.add(MockZenPing.TestPlugin.class); + plugins.add(TestZenDiscovery.TestPlugin.class); } Node build = new MockNode(settings, plugins); try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 06280ceb421..69c59086980 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -30,7 +30,10 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import 
com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; @@ -154,12 +157,15 @@ public abstract class ESTestCase extends LuceneTestCase { static { System.setProperty("log4j.shutdownHookEnabled", "false"); - // we can not shutdown logging when tests are running or the next test that runs within the - // same JVM will try to initialize logging after a security manager has been installed and - // this will fail - System.setProperty("es.log4j.shutdownEnabled", "false"); System.setProperty("log4j2.disable.jmx", "true"); System.setProperty("log4j.skipJansi", "true"); // jython has this crazy shaded Jansi version that log4j2 tries to load + + // shutdown hook so that when the test JVM exits, logging is shutdown too + Runtime.getRuntime().addShutdownHook(new Thread(() -> { + LoggerContext context = (LoggerContext) LogManager.getContext(false); + Configurator.shutdown(context); + })); + BootstrapForTesting.ensureInitialized(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 0d19e03299a..37e3a58295e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1020,7 +1020,7 @@ public final class InternalTestCluster extends TestCluster { } @Override - public void beforeIndexDeletion() throws IOException { + public void beforeIndexDeletion() throws Exception { // Check that 
the operations counter on index shard has reached 0. // The assumption here is that after a test there are no ongoing write operations. // test that have ongoing write operations after the test (for example because ttl is used @@ -1055,33 +1055,40 @@ public final class InternalTestCluster extends TestCluster { } } - private void assertShardIndexCounter() throws IOException { - final Collection nodesAndClients = nodes.values(); - for (NodeAndClient nodeAndClient : nodesAndClients) { - IndicesService indexServices = getInstance(IndicesService.class, nodeAndClient.name); - for (IndexService indexService : indexServices) { - for (IndexShard indexShard : indexService) { - int activeOperationsCount = indexShard.getActiveOperationsCount(); - if (activeOperationsCount > 0) { - TaskManager taskManager = getInstance(TransportService.class, nodeAndClient.name).getTaskManager(); - DiscoveryNode localNode = getInstance(ClusterService.class, nodeAndClient.name).localNode(); - List taskInfos = taskManager.getTasks().values().stream() - .filter(task -> task instanceof ReplicationTask) - .map(task -> task.taskInfo(localNode.getId(), true)) - .collect(Collectors.toList()); - ListTasksResponse response = new ListTasksResponse(taskInfos, Collections.emptyList(), Collections.emptyList()); - XContentBuilder builder = XContentFactory.jsonBuilder() - .prettyPrint() - .startObject() - .value(response) - .endObject(); - throw new AssertionError("expected index shard counter on shard " + indexShard.shardId() + " on node " + - nodeAndClient.name + " to be 0 but was " + activeOperationsCount + ". 
Current replication tasks on node:\n" + - builder.string()); + private void assertShardIndexCounter() throws Exception { + assertBusy(() -> { + final Collection nodesAndClients = nodes.values(); + for (NodeAndClient nodeAndClient : nodesAndClients) { + IndicesService indexServices = getInstance(IndicesService.class, nodeAndClient.name); + for (IndexService indexService : indexServices) { + for (IndexShard indexShard : indexService) { + int activeOperationsCount = indexShard.getActiveOperationsCount(); + if (activeOperationsCount > 0) { + TaskManager taskManager = getInstance(TransportService.class, nodeAndClient.name).getTaskManager(); + DiscoveryNode localNode = getInstance(ClusterService.class, nodeAndClient.name).localNode(); + List taskInfos = taskManager.getTasks().values().stream() + .filter(task -> task instanceof ReplicationTask) + .map(task -> task.taskInfo(localNode.getId(), true)) + .collect(Collectors.toList()); + ListTasksResponse response = new ListTasksResponse(taskInfos, Collections.emptyList(), Collections.emptyList()); + XContentBuilder builder = null; + try { + builder = XContentFactory.jsonBuilder() + .prettyPrint() + .startObject() + .value(response) + .endObject(); + throw new AssertionError("expected index shard counter on shard " + indexShard.shardId() + " on node " + + nodeAndClient.name + " to be 0 but was " + activeOperationsCount + ". 
Current replication tasks on node:\n" + + builder.string()); + } catch (IOException e) { + throw new RuntimeException("caught exception while building response [" + response + "]", e); + } + } } } } - } + }); } private void randomlyResetClients() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java index b960685777e..c2ac65d9980 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java @@ -82,7 +82,7 @@ public abstract class TestCluster implements Closeable { /** * Assertions that should run before the cluster is wiped should be called in this method */ - public void beforeIndexDeletion() throws IOException { + public void beforeIndexDeletion() throws Exception { } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java index 1d91b0980e4..5e1e1acd9ab 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java @@ -39,7 +39,7 @@ public class NoOpClient extends AbstractClient { } @Override - protected , + protected > void doExecute(Action action, Request request, ActionListener listener) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java index d5e7de1d9bf..c544b2bad88 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java @@ -18,39 +18,30 @@ */ package org.elasticsearch.test.discovery; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import 
org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.discovery.zen.PingContextProvider; -import org.elasticsearch.discovery.zen.ZenPing; -import org.elasticsearch.plugins.DiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; - import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.discovery.zen.PingContextProvider; +import org.elasticsearch.discovery.zen.ZenPing; + /** * A {@link ZenPing} implementation which returns results based on a static in-memory map. This allows pinging * to be immediate and can be used to speed up tests. */ public final class MockZenPing extends AbstractComponent implements ZenPing { - /** A marker plugin used by {@link org.elasticsearch.node.MockNode} to indicate this mock zen ping should be used. 
*/ - public static class TestPlugin extends Plugin {} - static final Map> activeNodesPerCluster = ConcurrentCollections.newConcurrentMap(); private volatile PingContextProvider contextProvider; - @Inject public MockZenPing(Settings settings) { super(settings); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java new file mode 100644 index 00000000000..3ca66c11bf7 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.test.discovery; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.discovery.zen.UnicastHostsProvider; +import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.discovery.zen.ZenPing; +import org.elasticsearch.plugins.DiscoveryPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +/** + * An alternative zen discovery which allows using mocks for things like pings, as well as + * giving access to internals. + */ +public class TestZenDiscovery extends ZenDiscovery { + + public static final Setting USE_MOCK_PINGS = + Setting.boolSetting("discovery.zen.use_mock_pings", true, Setting.Property.NodeScope); + + /** A plugin which installs mock discovery and configures it to be used. 
*/ + public static class TestPlugin extends Plugin implements DiscoveryPlugin { + private Settings settings; + public TestPlugin(Settings settings) { + this.settings = settings; + } + @Override + public Map> getDiscoveryTypes(ThreadPool threadPool, TransportService transportService, + ClusterService clusterService, UnicastHostsProvider hostsProvider) { + return Collections.singletonMap("test-zen", + () -> new TestZenDiscovery(settings, threadPool, transportService, clusterService, hostsProvider)); + } + + @Override + public List> getSettings() { + return Collections.singletonList(USE_MOCK_PINGS); + } + + @Override + public Settings additionalSettings() { + return Settings.builder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "test-zen").build(); + } + } + + private TestZenDiscovery(Settings settings, ThreadPool threadPool, TransportService transportService, + ClusterService clusterService, UnicastHostsProvider hostsProvider) { + super(settings, threadPool, transportService, clusterService, hostsProvider); + } + + @Override + protected ZenPing newZenPing(Settings settings, ThreadPool threadPool, TransportService transportService, + UnicastHostsProvider hostsProvider) { + if (USE_MOCK_PINGS.get(settings)) { + return new MockZenPing(settings); + } else { + return super.newZenPing(settings, threadPool, transportService, hostsProvider); + } + } + + public ZenPing getZenPing() { + return zenPing; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 18d6939dd4d..8cff517316b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -653,9 +653,15 @@ public class ElasticsearchAssertions { // streamable that comes in. 
} if (streamable instanceof ActionRequest) { - ((ActionRequest) streamable).validate(); + ((ActionRequest) streamable).validate(); + } + BytesReference orig; + try { + orig = serialize(version, streamable); + } catch (IllegalArgumentException e) { + // Can't serialize with this version so skip this test. + return; } - BytesReference orig = serialize(version, streamable); StreamInput input = orig.streamInput(); if (namedWriteableRegistry != null) { input = new NamedWriteableAwareStreamInput(input, namedWriteableRegistry); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 1e419faf06b..e05057648cc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -150,6 +150,16 @@ public class ESRestTestCase extends ESTestCase { return false; } + /** + * Controls whether or not to preserve templates upon completion of this test. The default implementation is to delete, not preserve, + * templates.
+ * + * @return whether or not to preserve templates + */ + protected boolean preserveTemplatesUponCompletion() { + return false; + } + private void wipeCluster() throws IOException { if (preserveIndicesUponCompletion() == false) { // wipe indices @@ -164,7 +174,9 @@ public class ESRestTestCase extends ESTestCase { } // wipe index templates - adminClient().performRequest("DELETE", "_template/*"); + if (preserveTemplatesUponCompletion() == false) { + adminClient().performRequest("DELETE", "_template/*"); + } wipeSnapshots(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index da8c54396df..14affcaf3eb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -100,7 +100,7 @@ public class ClientYamlTestClient { Version version = null; Version masterVersion = null; for (String perNode : split) { - final String[] versionAndMaster = perNode.split(" "); + final String[] versionAndMaster = perNode.split("\\s+"); assert versionAndMaster.length == 2 : "invalid line: " + perNode + " length: " + versionAndMaster.length; final Version currentVersion = Version.fromString(versionAndMaster[0]); final boolean master = versionAndMaster[1].trim().equals("*"); diff --git a/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java b/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java new file mode 100644 index 00000000000..ef6d0265b59 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test.hamcrest; + +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.test.VersionUtils.randomVersion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertVersionSerializable; + +public class ElasticsearchAssertionsTests extends ESTestCase { + public void testAssertVersionSerializableIsOkWithIllegalArgumentException() { + Version version = randomVersion(random()); + NamedWriteableRegistry registry = new NamedWriteableRegistry(emptyList()); + Streamable testStreamable = new TestStreamable(); + + // Should catch the exception and do nothing. 
+ assertVersionSerializable(version, testStreamable, registry); + } + + public static class TestStreamable implements Streamable { + @Override + public void readFrom(StreamInput in) throws IOException { + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new IllegalArgumentException("Not supported."); + } + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 7c001f910d7..327a49d3678 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -19,24 +19,6 @@ */ package org.elasticsearch.test.test; -import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.DiscoverySettings; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.InternalTestCluster; -import org.elasticsearch.test.NodeConfigurationSource; -import org.elasticsearch.test.discovery.MockZenPing; -import org.elasticsearch.transport.MockTcpTransportPlugin; -import org.elasticsearch.transport.TransportSettings; - import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -52,6 +34,24 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.LuceneTestCase; +import 
org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.NodeConfigurationSource; +import org.elasticsearch.test.discovery.TestZenDiscovery; +import org.elasticsearch.transport.MockTcpTransportPlugin; +import org.elasticsearch.transport.TransportSettings; + import static org.elasticsearch.cluster.node.DiscoveryNode.Role.DATA; import static org.elasticsearch.cluster.node.DiscoveryNode.Role.INGEST; import static org.elasticsearch.cluster.node.DiscoveryNode.Role.MASTER; @@ -155,7 +155,7 @@ public class InternalTestClusterTests extends ESTestCase { String nodePrefix = "foobar"; Path baseDir = createTempDir(); - final List> mockPlugins = Arrays.asList(MockTcpTransportPlugin.class, MockZenPing.TestPlugin.class); + final List> mockPlugins = Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class); InternalTestCluster cluster0 = new InternalTestCluster(clusterSeed, baseDir, masterNodes, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, mockPlugins, Function.identity()); @@ -218,7 +218,7 @@ public class InternalTestClusterTests extends ESTestCase { Path baseDir = createTempDir(); InternalTestCluster cluster = new InternalTestCluster(clusterSeed, baseDir, masterNodes, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, - enableHttpPipelining, nodePrefix, Arrays.asList(MockTcpTransportPlugin.class, MockZenPing.TestPlugin.class), + 
enableHttpPipelining, nodePrefix, Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class), Function.identity()); try { cluster.beforeTest(random(), 0.0); @@ -296,7 +296,7 @@ public class InternalTestClusterTests extends ESTestCase { return Settings.builder() .put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME).build(); } - }, 0, randomBoolean(), "", Arrays.asList(MockTcpTransportPlugin.class, MockZenPing.TestPlugin.class), Function.identity()); + }, 0, randomBoolean(), "", Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class), Function.identity()); cluster.beforeTest(random(), 0.0); try { Map> pathsPerRole = new HashMap<>();